Jan 02 00:07:04 crc systemd[1]: Starting Kubernetes Kubelet... Jan 02 00:07:04 crc restorecon[4691]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 02 00:07:04 crc 
restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 02 00:07:04 crc 
restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc 
restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc 
restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 02 00:07:04 
crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 
00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 
00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 02 00:07:04 crc 
restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 
00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 
00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc 
restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:05 crc restorecon[4691]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 02 00:07:05 crc restorecon[4691]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 02 00:07:05 crc kubenswrapper[4766]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 02 00:07:05 crc kubenswrapper[4766]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 02 00:07:05 crc kubenswrapper[4766]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 02 00:07:05 crc kubenswrapper[4766]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Jan 02 00:07:05 crc kubenswrapper[4766]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jan 02 00:07:05 crc kubenswrapper[4766]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.359637 4766 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365242 4766 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365275 4766 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365287 4766 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365298 4766 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365307 4766 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365315 4766 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365324 4766 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365335 4766 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
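The deprecation warnings above all point at the same migration: each of these command-line flags is meant to move into the file passed to the kubelet via --config. As a rough, illustrative sketch only (the socket path, plugin directory, taint, and reserved/eviction values below are assumptions chosen for the example, not values read from this node), the corresponding KubeletConfiguration entries would look something like:

    apiVersion: kubelet.config.k8s.io/v1beta1
    kind: KubeletConfiguration
    # replaces --container-runtime-endpoint (CRI-O socket assumed here)
    containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
    # replaces --volume-plugin-dir
    volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec
    # replaces --register-with-taints (example taint)
    registerWithTaints:
      - key: node-role.kubernetes.io/master
        effect: NoSchedule
    # replaces --system-reserved (example reservations)
    systemReserved:
      cpu: 500m
      memory: 1Gi
    # per the warning above, eviction thresholds replace --minimum-container-ttl-duration
    evictionHard:
      memory.available: 100Mi
    # feature gates are also set here; the "unrecognized feature gate" warnings that
    # follow appear to be OpenShift-specific gates that upstream kubelet code does not register
    featureGates:
      CloudDualStackNodeIPs: true
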
Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365345 4766 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365354 4766 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365363 4766 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365372 4766 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365380 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365388 4766 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365396 4766 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365404 4766 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365411 4766 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365419 4766 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365427 4766 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365435 4766 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365442 4766 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365450 4766 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365458 4766 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365466 4766 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365474 4766 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365483 4766 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365490 4766 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365509 4766 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365516 4766 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365524 4766 feature_gate.go:330] unrecognized feature gate: Example Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365532 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365540 4766 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365547 4766 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365555 4766 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 
02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365563 4766 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365570 4766 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365578 4766 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365585 4766 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365593 4766 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365601 4766 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365609 4766 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365617 4766 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365624 4766 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365632 4766 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365641 4766 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365649 4766 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365656 4766 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365664 4766 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365671 4766 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365678 4766 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365686 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365694 4766 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365701 4766 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365709 4766 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365716 4766 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365724 4766 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365732 4766 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365740 4766 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365748 4766 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365756 4766 feature_gate.go:330] unrecognized 
feature gate: UpgradeStatus Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365764 4766 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365772 4766 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365780 4766 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365787 4766 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365797 4766 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365809 4766 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365817 4766 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365826 4766 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365835 4766 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365866 4766 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.365875 4766 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366009 4766 flags.go:64] FLAG: --address="0.0.0.0" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366025 4766 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366050 4766 flags.go:64] FLAG: --anonymous-auth="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366062 4766 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366073 4766 flags.go:64] FLAG: --authentication-token-webhook="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366083 4766 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366094 4766 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366106 4766 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366116 4766 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366125 4766 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366134 4766 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366144 4766 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366153 4766 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366162 4766 flags.go:64] FLAG: --cgroup-root="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366170 4766 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366180 4766 flags.go:64] FLAG: --client-ca-file="" Jan 02 
00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366224 4766 flags.go:64] FLAG: --cloud-config="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366233 4766 flags.go:64] FLAG: --cloud-provider="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366242 4766 flags.go:64] FLAG: --cluster-dns="[]" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366253 4766 flags.go:64] FLAG: --cluster-domain="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366262 4766 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366273 4766 flags.go:64] FLAG: --config-dir="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366281 4766 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366291 4766 flags.go:64] FLAG: --container-log-max-files="5" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366302 4766 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366311 4766 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366320 4766 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366329 4766 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366338 4766 flags.go:64] FLAG: --contention-profiling="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366347 4766 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366356 4766 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366366 4766 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366374 4766 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366385 4766 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366394 4766 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366403 4766 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366412 4766 flags.go:64] FLAG: --enable-load-reader="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366420 4766 flags.go:64] FLAG: --enable-server="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366430 4766 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366441 4766 flags.go:64] FLAG: --event-burst="100" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366451 4766 flags.go:64] FLAG: --event-qps="50" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366460 4766 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366469 4766 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366478 4766 flags.go:64] FLAG: --eviction-hard="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366489 4766 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366498 4766 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 02 00:07:05 crc 
kubenswrapper[4766]: I0102 00:07:05.366507 4766 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366517 4766 flags.go:64] FLAG: --eviction-soft="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366526 4766 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366535 4766 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366544 4766 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366553 4766 flags.go:64] FLAG: --experimental-mounter-path="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366562 4766 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366571 4766 flags.go:64] FLAG: --fail-swap-on="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366581 4766 flags.go:64] FLAG: --feature-gates="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366591 4766 flags.go:64] FLAG: --file-check-frequency="20s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366600 4766 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366610 4766 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366619 4766 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366628 4766 flags.go:64] FLAG: --healthz-port="10248" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366637 4766 flags.go:64] FLAG: --help="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366646 4766 flags.go:64] FLAG: --hostname-override="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366655 4766 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366665 4766 flags.go:64] FLAG: --http-check-frequency="20s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366674 4766 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366684 4766 flags.go:64] FLAG: --image-credential-provider-config="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366693 4766 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366702 4766 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366711 4766 flags.go:64] FLAG: --image-service-endpoint="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366719 4766 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366728 4766 flags.go:64] FLAG: --kube-api-burst="100" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366737 4766 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366746 4766 flags.go:64] FLAG: --kube-api-qps="50" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366755 4766 flags.go:64] FLAG: --kube-reserved="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366765 4766 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366773 4766 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 02 00:07:05 crc 
kubenswrapper[4766]: I0102 00:07:05.366782 4766 flags.go:64] FLAG: --kubelet-cgroups="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366792 4766 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366801 4766 flags.go:64] FLAG: --lock-file="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366811 4766 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366820 4766 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366829 4766 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366865 4766 flags.go:64] FLAG: --log-json-split-stream="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366875 4766 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366884 4766 flags.go:64] FLAG: --log-text-split-stream="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366894 4766 flags.go:64] FLAG: --logging-format="text" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366902 4766 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366912 4766 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366921 4766 flags.go:64] FLAG: --manifest-url="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366930 4766 flags.go:64] FLAG: --manifest-url-header="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366941 4766 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366950 4766 flags.go:64] FLAG: --max-open-files="1000000" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366960 4766 flags.go:64] FLAG: --max-pods="110" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366970 4766 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366979 4766 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366988 4766 flags.go:64] FLAG: --memory-manager-policy="None" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.366997 4766 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367006 4766 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367015 4766 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367024 4766 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367044 4766 flags.go:64] FLAG: --node-status-max-images="50" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367053 4766 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367062 4766 flags.go:64] FLAG: --oom-score-adj="-999" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367071 4766 flags.go:64] FLAG: --pod-cidr="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367080 4766 flags.go:64] FLAG: 
--pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367094 4766 flags.go:64] FLAG: --pod-manifest-path="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367103 4766 flags.go:64] FLAG: --pod-max-pids="-1" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367112 4766 flags.go:64] FLAG: --pods-per-core="0" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367122 4766 flags.go:64] FLAG: --port="10250" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367131 4766 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367139 4766 flags.go:64] FLAG: --provider-id="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367148 4766 flags.go:64] FLAG: --qos-reserved="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367157 4766 flags.go:64] FLAG: --read-only-port="10255" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367166 4766 flags.go:64] FLAG: --register-node="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367175 4766 flags.go:64] FLAG: --register-schedulable="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367185 4766 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367199 4766 flags.go:64] FLAG: --registry-burst="10" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367208 4766 flags.go:64] FLAG: --registry-qps="5" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367216 4766 flags.go:64] FLAG: --reserved-cpus="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367225 4766 flags.go:64] FLAG: --reserved-memory="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367236 4766 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367245 4766 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367254 4766 flags.go:64] FLAG: --rotate-certificates="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367263 4766 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367272 4766 flags.go:64] FLAG: --runonce="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367281 4766 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367290 4766 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367299 4766 flags.go:64] FLAG: --seccomp-default="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367308 4766 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367317 4766 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367326 4766 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367335 4766 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367345 4766 flags.go:64] FLAG: --storage-driver-password="root" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367353 4766 flags.go:64] FLAG: --storage-driver-secure="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 
00:07:05.367362 4766 flags.go:64] FLAG: --storage-driver-table="stats" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367371 4766 flags.go:64] FLAG: --storage-driver-user="root" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367380 4766 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367389 4766 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367398 4766 flags.go:64] FLAG: --system-cgroups="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367406 4766 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367420 4766 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367429 4766 flags.go:64] FLAG: --tls-cert-file="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367438 4766 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367448 4766 flags.go:64] FLAG: --tls-min-version="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367456 4766 flags.go:64] FLAG: --tls-private-key-file="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367465 4766 flags.go:64] FLAG: --topology-manager-policy="none" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367474 4766 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367483 4766 flags.go:64] FLAG: --topology-manager-scope="container" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367492 4766 flags.go:64] FLAG: --v="2" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367503 4766 flags.go:64] FLAG: --version="false" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367516 4766 flags.go:64] FLAG: --vmodule="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367526 4766 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.367536 4766 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367726 4766 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367737 4766 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367746 4766 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367755 4766 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367763 4766 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367773 4766 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
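The flag dump above (the flags.go:64 entries) records the kubelet's effective command-line settings in a regular FLAG: --name="value" format, so it can be extracted from this journal mechanically. A minimal sketch, assuming a local copy of this journal text is saved as kubelet.log (the filename is an assumption, not something the log dictates):

import re
from pathlib import Path

# Collect the kubelet's effective CLI flags from the flags.go:64 dump lines.
# The pattern matches the FLAG: --name="value" fragments shown above and is
# insensitive to how the journal lines are wrapped.
FLAG_RE = re.compile(r'FLAG: (--[\w.-]+)="(.*?)"')

def parse_kubelet_flags(log_text: str) -> dict:
    return dict(FLAG_RE.findall(log_text))

if __name__ == "__main__":
    flags = parse_kubelet_flags(Path("kubelet.log").read_text())
    print(flags["--container-runtime-endpoint"])  # /var/run/crio/crio.sock
    print(flags["--node-ip"])                     # 192.168.126.11
    print(flags["--system-reserved"])             # cpu=200m,ephemeral-storage=350Mi,memory=350Mi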
Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367783 4766 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367791 4766 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367800 4766 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367808 4766 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367816 4766 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367824 4766 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367832 4766 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367864 4766 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367875 4766 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367883 4766 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367892 4766 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367900 4766 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367909 4766 feature_gate.go:330] unrecognized feature gate: Example Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367918 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367926 4766 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367934 4766 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367941 4766 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367949 4766 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367957 4766 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367964 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367972 4766 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367979 4766 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367992 4766 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.367999 4766 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368007 4766 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368015 4766 
feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368024 4766 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368031 4766 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368041 4766 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368048 4766 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368056 4766 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368064 4766 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368071 4766 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368082 4766 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368090 4766 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368098 4766 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368106 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368113 4766 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368121 4766 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368129 4766 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368137 4766 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368145 4766 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368152 4766 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368160 4766 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368168 4766 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368175 4766 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368183 4766 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368190 4766 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368198 4766 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368206 4766 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368213 4766 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 
00:07:05.368221 4766 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368228 4766 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368236 4766 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368247 4766 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368256 4766 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368265 4766 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368273 4766 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368281 4766 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368289 4766 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368297 4766 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368304 4766 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368312 4766 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368320 4766 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.368330 4766 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.368345 4766 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.379667 4766 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.379698 4766 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379817 4766 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379829 4766 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379836 4766 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379859 4766 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379864 4766 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379869 4766 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379875 4766 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379881 4766 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379886 4766 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379892 4766 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379898 4766 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379904 4766 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379909 4766 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379914 4766 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379918 4766 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379924 4766 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379929 4766 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379936 4766 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379942 4766 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379948 4766 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379953 4766 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379957 4766 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379962 4766 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379967 4766 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379972 4766 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379980 4766 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379984 4766 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379989 4766 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379994 4766 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.379998 4766 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380003 4766 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380008 4766 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380013 4766 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380018 4766 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380023 4766 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380028 4766 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380032 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380037 4766 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380042 4766 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380046 4766 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380051 4766 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380055 4766 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380060 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380065 4766 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB 
Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380070 4766 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380074 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380080 4766 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380084 4766 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380089 4766 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380094 4766 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380098 4766 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380103 4766 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380108 4766 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380112 4766 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380117 4766 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380121 4766 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380126 4766 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380131 4766 feature_gate.go:330] unrecognized feature gate: Example Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380135 4766 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380140 4766 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380144 4766 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380150 4766 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380154 4766 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380159 4766 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380163 4766 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380168 4766 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380172 4766 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380176 4766 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380181 4766 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380185 4766 feature_gate.go:330] 
unrecognized feature gate: VSphereStaticIPs Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380190 4766 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.380198 4766 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380355 4766 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380367 4766 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380372 4766 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380378 4766 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380383 4766 feature_gate.go:330] unrecognized feature gate: Example Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380387 4766 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380392 4766 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380396 4766 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380401 4766 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380405 4766 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380409 4766 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380413 4766 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380417 4766 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380421 4766 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380426 4766 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380430 4766 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380434 4766 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380439 4766 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380443 4766 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380447 4766 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380451 4766 feature_gate.go:330] unrecognized feature gate: 
MultiArchInstallAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380455 4766 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380459 4766 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380464 4766 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380469 4766 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380475 4766 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380479 4766 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380483 4766 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380488 4766 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380493 4766 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380497 4766 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380501 4766 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380505 4766 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380509 4766 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380514 4766 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380518 4766 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380523 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380527 4766 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380532 4766 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380536 4766 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380543 4766 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380550 4766 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380556 4766 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380561 4766 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380566 4766 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380570 4766 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380575 4766 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380579 4766 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380584 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380588 4766 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380593 4766 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380598 4766 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380602 4766 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380607 4766 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380611 4766 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380616 4766 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380622 4766 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380627 4766 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380633 4766 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380639 4766 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380644 4766 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380651 4766 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380658 4766 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380664 4766 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380668 4766 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380673 4766 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380677 4766 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380682 4766 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380686 4766 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380690 4766 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.380695 4766 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.380703 4766 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.381134 4766 server.go:940] "Client rotation is on, will bootstrap in background" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.384060 4766 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.384156 4766 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
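The kubelet resolves its feature gates from the KubeletConfiguration and logs the final map above; names it does not know about (the gates flagged here are largely OpenShift cluster-level gates rather than kubelet gates) are reported as unrecognized and ignored. Below is a minimal, self-contained Go sketch of parsing a "Name=true,Other=false" gate string into such a map. It is illustrative only, not the kubelet's actual feature_gate implementation.

// A minimal sketch of turning a comma-separated "Name=bool" feature-gate
// string into a map like the "feature gates: {map[...]}" line logged above.
// Illustrative only; the real kubelet additionally warns about gates it
// does not recognize (feature_gate.go:330 in this log).
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseFeatureGates splits a comma-separated list of name=bool pairs.
func parseFeatureGates(spec string) (map[string]bool, error) {
	gates := map[string]bool{}
	for _, pair := range strings.Split(spec, ",") {
		pair = strings.TrimSpace(pair)
		if pair == "" {
			continue
		}
		name, val, ok := strings.Cut(pair, "=")
		if !ok {
			return nil, fmt.Errorf("missing '=' in %q", pair)
		}
		enabled, err := strconv.ParseBool(val)
		if err != nil {
			return nil, fmt.Errorf("invalid value for %s: %w", name, err)
		}
		gates[name] = enabled
	}
	return gates, nil
}

func main() {
	gates, err := parseFeatureGates("CloudDualStackNodeIPs=true,KMSv1=true,NodeSwap=false")
	if err != nil {
		panic(err)
	}
	fmt.Println(gates) // map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]
}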
Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.384778 4766 server.go:997] "Starting client certificate rotation" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.384801 4766 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.385390 4766 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-20 02:34:29.508431631 +0000 UTC Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.385523 4766 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.395020 4766 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.397470 4766 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.398160 4766 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.411482 4766 log.go:25] "Validated CRI v1 runtime API" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.428510 4766 log.go:25] "Validated CRI v1 image API" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.430444 4766 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.432945 4766 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-02-00-02-39-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.432972 4766 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.455511 4766 manager.go:217] Machine: {Timestamp:2026-01-02 00:07:05.453596118 +0000 UTC m=+0.252083352 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:7aff2e2a-6602-4011-9647-4cd49a97a1f5 BootID:128f0816-59c2-4555-a5b2-57e34966d0da Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 
Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:57:51:bf Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:57:51:bf Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:8a:f2:0f Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:80:a2:fc Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:d0:33:90 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:f3:e6:ca Speed:-1 Mtu:1496} {Name:eth10 MacAddress:d6:48:ed:89:29:f4 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:aa:60:66:90:4b:8b Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] 
Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.455778 4766 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.455921 4766 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.456598 4766 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.456806 4766 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.456904 4766 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.457169 4766 topology_manager.go:138] "Creating topology manager with none policy" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.457181 4766 
container_manager_linux.go:303] "Creating device plugin manager" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.457319 4766 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.457441 4766 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.457615 4766 state_mem.go:36] "Initialized new in-memory state store" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.458050 4766 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.458861 4766 kubelet.go:418] "Attempting to sync node with API server" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.458885 4766 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.458962 4766 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.458980 4766 kubelet.go:324] "Adding apiserver pod source" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.458994 4766 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.460857 4766 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.463836 4766 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.463968 4766 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.464073 4766 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.464047 4766 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.464170 4766 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.464753 4766 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465392 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 02 00:07:05 crc 
kubenswrapper[4766]: I0102 00:07:05.465421 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465431 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465440 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465456 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465465 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465474 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465488 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465498 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465507 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465539 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465547 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.465797 4766 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.466335 4766 server.go:1280] "Started kubelet" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.466903 4766 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.467052 4766 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Jan 02 00:07:05 crc systemd[1]: Started Kubernetes Kubelet. 
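Every external call in this stretch of the log fails with "connect: connection refused" because the API server at api-int.crc.testing:6443 is not up yet; the kubelet simply keeps retrying (reflector relists, CSR posts, lease updates) until it succeeds. The following is a rough, self-contained Go sketch of that retry-with-backoff idea; it is only an illustration, not the backoff logic that client-go or the kubelet actually uses.

// A minimal sketch of retrying a TCP dial with exponential backoff, the
// general pattern behind the repeated "connection refused" messages above.
// Illustrative only.
package main

import (
	"fmt"
	"net"
	"time"
)

// dialWithBackoff retries a TCP dial, doubling the wait after each failure
// up to a cap, and gives up after maxAttempts.
func dialWithBackoff(addr string, maxAttempts int) (net.Conn, error) {
	backoff := 500 * time.Millisecond
	const maxBackoff = 8 * time.Second
	var lastErr error
	for attempt := 1; attempt <= maxAttempts; attempt++ {
		conn, err := net.DialTimeout("tcp", addr, 2*time.Second)
		if err == nil {
			return conn, nil
		}
		lastErr = err
		fmt.Printf("attempt %d: %v; retrying in %s\n", attempt, err, backoff)
		time.Sleep(backoff)
		if backoff *= 2; backoff > maxBackoff {
			backoff = maxBackoff
		}
	}
	return nil, fmt.Errorf("giving up after %d attempts: %w", maxAttempts, lastErr)
}

func main() {
	// Endpoint taken from the log above; adjust for your own cluster.
	conn, err := dialWithBackoff("api-int.crc.testing:6443", 5)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer conn.Close()
	fmt.Println("connected to", conn.RemoteAddr())
}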
Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.468112 4766 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.469668 4766 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.472317 4766 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.200:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1886c1498f0d9d7f default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-02 00:07:05.466305919 +0000 UTC m=+0.264793083,LastTimestamp:2026-01-02 00:07:05.466305919 +0000 UTC m=+0.264793083,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.473223 4766 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.473257 4766 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.473326 4766 server.go:460] "Adding debug handlers to kubelet server" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.473473 4766 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 19:18:22.653151474 +0000 UTC Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.473871 4766 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.473890 4766 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.473912 4766 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.473979 4766 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.474588 4766 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="200ms" Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.476730 4766 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.476798 4766 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 
00:07:05.481722 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481770 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481782 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481792 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481801 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481811 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481822 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481832 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481870 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481884 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481897 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481912 4766 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481925 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481939 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481967 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481980 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.481992 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482005 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482014 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482024 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482032 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482041 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482049 4766 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482057 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482067 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482075 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482086 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482101 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482110 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482132 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482142 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482151 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482160 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482170 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482179 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482187 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482199 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482219 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482229 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482238 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482247 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482256 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482266 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482343 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482360 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482370 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482381 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482392 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482402 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482414 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482426 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482439 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482461 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482472 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482484 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482497 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482511 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482524 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482535 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482544 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482553 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482561 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482571 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482582 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482594 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482607 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482617 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482626 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482640 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482652 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482664 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482673 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482682 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482691 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482700 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482708 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482717 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482726 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482734 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482743 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482752 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482761 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482769 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482777 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482786 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482796 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482805 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482814 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482822 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482832 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482869 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482879 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482889 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482898 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482907 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482915 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482924 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482933 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482942 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482950 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482973 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482984 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.482994 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483003 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483019 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483037 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483052 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483064 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483075 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483085 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483095 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483017 4766 factory.go:153] Registering CRI-O factory Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483125 4766 factory.go:221] Registration of the crio container factory successfully Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483106 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483263 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483208 4766 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483293 4766 factory.go:55] Registering systemd factory Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483300 4766 factory.go:221] Registration of the systemd container factory successfully Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483331 4766 factory.go:103] Registering Raw factory Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483294 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483343 4766 manager.go:1196] Started watching for new ooms in manager Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483353 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483366 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483377 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483388 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483397 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483406 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483416 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483426 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483436 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483448 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483458 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483466 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483476 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483488 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483498 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483507 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483518 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483529 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483539 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483549 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483561 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483570 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483579 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483588 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483599 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483608 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483617 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483627 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483637 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483648 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483657 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483667 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483677 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483686 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483696 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483706 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483715 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483724 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483734 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483744 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483754 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483766 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483775 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483784 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483794 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483804 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483816 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483827 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483874 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483886 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483897 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483906 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483918 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483927 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483937 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483948 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483958 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483968 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483976 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483985 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.483994 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.484010 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.484019 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.484027 4766 manager.go:319] Starting recovery of all containers Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.484029 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485287 4766 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485324 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485340 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485354 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485369 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485412 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485429 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485442 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485457 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485470 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485483 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485496 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485508 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485521 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485535 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485576 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485596 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485609 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485623 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485635 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485648 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485661 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485673 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485686 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485700 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485737 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485753 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485767 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485783 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485794 4766 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485806 4766 reconstruct.go:97] "Volume reconstruction finished" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.485814 4766 reconciler.go:26] "Reconciler: start to sync state" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.505285 4766 manager.go:324] Recovery completed Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.516200 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.520270 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.520352 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.520377 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.521592 4766 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.521628 4766 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.521670 4766 state_mem.go:36] "Initialized new in-memory state store" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.530788 4766 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.532140 4766 policy_none.go:49] "None policy: Start" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.532707 4766 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.532770 4766 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.532806 4766 kubelet.go:2335] "Starting kubelet main sync loop" Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.532946 4766 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.533015 4766 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.533041 4766 state_mem.go:35] "Initializing new in-memory state store" Jan 02 00:07:05 crc kubenswrapper[4766]: W0102 00:07:05.534101 4766 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.534144 4766 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.574149 4766 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.598616 4766 manager.go:334] "Starting Device Plugin manager" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.598668 4766 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.598681 4766 server.go:79] "Starting device plugin registration server" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.599323 4766 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.599341 4766 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.599546 4766 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.599701 4766 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.599711 4766 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.610183 4766 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.633946 4766 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.634055 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.636060 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.636088 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.636097 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.636199 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.636645 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.636680 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.636754 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.636769 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.636777 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.636875 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.637120 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.637188 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.637500 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.637526 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.637536 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.637619 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.637809 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.637877 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638034 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638054 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638062 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638645 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638665 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638673 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638677 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638699 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638713 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638727 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638751 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638776 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.638955 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.639134 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.639188 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.639797 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.639820 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.639830 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.640042 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.640069 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.640080 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.640080 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.640252 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.642785 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.642818 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.642870 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.675406 4766 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="400ms" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.690797 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.690864 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.690886 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.690906 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.690936 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.690952 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.690967 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.690983 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.691003 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.691018 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.691033 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.691049 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.691064 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.691079 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.691098 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.701739 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.703936 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.703964 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.703974 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.703993 4766 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.704335 4766 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.200:6443: connect: connection refused" node="crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792446 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792510 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792542 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792587 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792623 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792652 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792680 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792708 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792735 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792753 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792819 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792903 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792763 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793009 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" 
(UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793050 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793077 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793101 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793122 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.792959 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793158 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793160 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793485 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793545 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 02 00:07:05 crc 
kubenswrapper[4766]: I0102 00:07:05.793605 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793562 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793545 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793687 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793655 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793784 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.793918 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.905479 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.907076 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.907126 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.907146 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.907178 4766 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 02 00:07:05 crc kubenswrapper[4766]: E0102 00:07:05.907631 4766 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 
38.102.83.200:6443: connect: connection refused" node="crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.971393 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 02 00:07:05 crc kubenswrapper[4766]: I0102 00:07:05.996733 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.006302 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 02 00:07:06 crc kubenswrapper[4766]: W0102 00:07:06.006507 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-a1e2db06bf42d300520d697aaa58efd804803adb5f6ea259c80cad8076a27ae3 WatchSource:0}: Error finding container a1e2db06bf42d300520d697aaa58efd804803adb5f6ea259c80cad8076a27ae3: Status 404 returned error can't find the container with id a1e2db06bf42d300520d697aaa58efd804803adb5f6ea259c80cad8076a27ae3 Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.028525 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.033065 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 02 00:07:06 crc kubenswrapper[4766]: W0102 00:07:06.051610 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-82812602d395c39268adf99f2ca00635ca330315001c0255de915f6f1cd4fb4d WatchSource:0}: Error finding container 82812602d395c39268adf99f2ca00635ca330315001c0255de915f6f1cd4fb4d: Status 404 returned error can't find the container with id 82812602d395c39268adf99f2ca00635ca330315001c0255de915f6f1cd4fb4d Jan 02 00:07:06 crc kubenswrapper[4766]: W0102 00:07:06.062470 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-85212c762f411f508d9dd48488e056598c6feeb296baab9f3279f229fa3a545c WatchSource:0}: Error finding container 85212c762f411f508d9dd48488e056598c6feeb296baab9f3279f229fa3a545c: Status 404 returned error can't find the container with id 85212c762f411f508d9dd48488e056598c6feeb296baab9f3279f229fa3a545c Jan 02 00:07:06 crc kubenswrapper[4766]: E0102 00:07:06.077452 4766 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="800ms" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.308274 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.309947 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.310021 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.310048 4766 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.310097 4766 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 02 00:07:06 crc kubenswrapper[4766]: E0102 00:07:06.310620 4766 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.200:6443: connect: connection refused" node="crc" Jan 02 00:07:06 crc kubenswrapper[4766]: W0102 00:07:06.413371 4766 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Jan 02 00:07:06 crc kubenswrapper[4766]: E0102 00:07:06.413759 4766 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.467753 4766 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.473690 4766 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 05:21:43.08257061 +0000 UTC Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.538555 4766 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2" exitCode=0 Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.538624 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2"} Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.538882 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a1e2db06bf42d300520d697aaa58efd804803adb5f6ea259c80cad8076a27ae3"} Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.539124 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.540145 4766 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a" exitCode=0 Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.540186 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a"} Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.540230 4766 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"85212c762f411f508d9dd48488e056598c6feeb296baab9f3279f229fa3a545c"} Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.540373 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.541087 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.541122 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.541135 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.542280 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.542314 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.542328 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.542680 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34"} Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.542716 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"82812602d395c39268adf99f2ca00635ca330315001c0255de915f6f1cd4fb4d"} Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.545627 4766 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f3c8b26904adf1747b5012a834b9cd9a4c0a7ca3f0fc597593b594b9aebaee2a" exitCode=0 Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.545714 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f3c8b26904adf1747b5012a834b9cd9a4c0a7ca3f0fc597593b594b9aebaee2a"} Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.545768 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a03a4bef895ad4c07a9064d6ed8bc1f095def432d036e7e2fabcafe6df5bc188"} Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.545950 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.547378 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.547419 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.547431 4766 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.549405 4766 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205" exitCode=0 Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.549438 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205"} Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.549463 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cabbd5aeeca82e9ec39fbb5d80aeb372a4af12f3ed34a3275c6d9d69e4c6470b"} Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.549560 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.550771 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.550802 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.550815 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.553082 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.554052 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.554080 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:06 crc kubenswrapper[4766]: I0102 00:07:06.554092 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:06 crc kubenswrapper[4766]: W0102 00:07:06.749802 4766 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Jan 02 00:07:06 crc kubenswrapper[4766]: E0102 00:07:06.750033 4766 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Jan 02 00:07:06 crc kubenswrapper[4766]: W0102 00:07:06.821572 4766 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Jan 02 00:07:06 crc kubenswrapper[4766]: E0102 00:07:06.821735 4766 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: 
Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Jan 02 00:07:06 crc kubenswrapper[4766]: E0102 00:07:06.879827 4766 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="1.6s" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.111226 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.117742 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.117785 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.117794 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.117818 4766 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 02 00:07:07 crc kubenswrapper[4766]: E0102 00:07:07.118399 4766 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.200:6443: connect: connection refused" node="crc" Jan 02 00:07:07 crc kubenswrapper[4766]: W0102 00:07:07.120343 4766 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Jan 02 00:07:07 crc kubenswrapper[4766]: E0102 00:07:07.120400 4766 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Jan 02 00:07:07 crc kubenswrapper[4766]: E0102 00:07:07.360678 4766 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.200:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1886c1498f0d9d7f default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-02 00:07:05.466305919 +0000 UTC m=+0.264793083,LastTimestamp:2026-01-02 00:07:05.466305919 +0000 UTC m=+0.264793083,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.456328 4766 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 02 00:07:07 crc kubenswrapper[4766]: E0102 00:07:07.457326 4766 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while 
requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.468379 4766 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.474611 4766 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 20:20:18.285068697 +0000 UTC Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.555590 4766 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="39f6ea9695cea2feb2db96f73d1155972bbeb524effb6b23e5a66fd7d208b91d" exitCode=0 Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.555692 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"39f6ea9695cea2feb2db96f73d1155972bbeb524effb6b23e5a66fd7d208b91d"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.555870 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.556740 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.556766 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.556777 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.559788 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.559832 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.559868 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.559881 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.561704 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"9ce4b7f2bbace6669b4af1d2b6a2aa0dfc734cc543639a61fd362427d2b364ee"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.561773 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.562557 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.562586 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.562597 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.565832 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.565902 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.565917 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.566021 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.566827 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.566909 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.566931 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.574699 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.574748 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.574763 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc"} Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.574880 
4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.575908 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.575955 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:07 crc kubenswrapper[4766]: I0102 00:07:07.575971 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.138577 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.147971 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.475066 4766 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 12:02:37.55925661 +0000 UTC Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.580602 4766 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f5a53c095904f850f77e108ab8b4f90a468d69e57509c619b83936a3c3fc2df0" exitCode=0 Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.580664 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f5a53c095904f850f77e108ab8b4f90a468d69e57509c619b83936a3c3fc2df0"} Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.580928 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.582111 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.582157 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.582174 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.587258 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498"} Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.587320 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.587408 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.587475 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.587484 4766 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.587649 4766 kubelet_node_status.go:401] "Setting 
node annotation to enable volume controller attach/detach" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.589006 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.589047 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.589056 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.589099 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.589121 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.589183 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.589207 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.589221 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.589064 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.590092 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.590152 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.590172 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.719362 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.721626 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.721691 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.721709 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:08 crc kubenswrapper[4766]: I0102 00:07:08.721746 4766 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.476093 4766 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 02:18:48.106031378 +0000 UTC Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.595163 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9ca99de9b6ec4a85e4ee32254fdf93f846faf036748297d327bd4e0007954215"} Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.595201 4766 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ab50448d0cde9a3982d3106c391537332529754231f74e10b5599f122c56319c"} Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.595213 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"06e451949779c2c0c34eb41926158609bc00dfce68bfa4d28ca5fd066ca1e05c"} Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.595234 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.595301 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.595316 4766 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.595381 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.596487 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.596510 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.596519 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.597195 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.597248 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:09 crc kubenswrapper[4766]: I0102 00:07:09.597290 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.362677 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.476760 4766 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 23:19:09.707420828 +0000 UTC Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.603693 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"733b6a898d0c6d48820f54912587d034469a7103af84972716331b1f5d87e88b"} Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.603774 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3345a83fca294debd1ed87181d996d662bc3facf4d4484c64116590d05ac4025"} Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.603788 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.603722 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.605081 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.605126 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.605144 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.605229 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.605271 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:10 crc kubenswrapper[4766]: I0102 00:07:10.605289 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:11 crc kubenswrapper[4766]: I0102 00:07:11.476910 4766 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 14:46:17.890850568 +0000 UTC Jan 02 00:07:11 crc kubenswrapper[4766]: I0102 00:07:11.567573 4766 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 02 00:07:11 crc kubenswrapper[4766]: I0102 00:07:11.607337 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:11 crc kubenswrapper[4766]: I0102 00:07:11.607338 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:11 crc kubenswrapper[4766]: I0102 00:07:11.608956 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:11 crc kubenswrapper[4766]: I0102 00:07:11.609006 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:11 crc kubenswrapper[4766]: I0102 00:07:11.609030 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:11 crc kubenswrapper[4766]: I0102 00:07:11.609592 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:11 crc kubenswrapper[4766]: I0102 00:07:11.609651 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:11 crc kubenswrapper[4766]: I0102 00:07:11.609674 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:11 crc kubenswrapper[4766]: I0102 00:07:11.927988 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.129634 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.159871 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.160048 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:12 crc 
kubenswrapper[4766]: I0102 00:07:12.161262 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.161300 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.161312 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.477229 4766 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 10:30:38.992883048 +0000 UTC Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.609935 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.610059 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.611565 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.611624 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.611637 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.612011 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.612281 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:12 crc kubenswrapper[4766]: I0102 00:07:12.612470 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:13 crc kubenswrapper[4766]: I0102 00:07:13.396686 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:13 crc kubenswrapper[4766]: I0102 00:07:13.397417 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:13 crc kubenswrapper[4766]: I0102 00:07:13.399212 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:13 crc kubenswrapper[4766]: I0102 00:07:13.399348 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:13 crc kubenswrapper[4766]: I0102 00:07:13.399414 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:13 crc kubenswrapper[4766]: I0102 00:07:13.477516 4766 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 07:36:54.929690751 +0000 UTC Jan 02 00:07:14 crc kubenswrapper[4766]: I0102 00:07:14.478358 4766 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 11:57:21.084937511 +0000 UTC Jan 02 00:07:14 crc kubenswrapper[4766]: I0102 00:07:14.478440 4766 
certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 203h50m6.606505663s for next certificate rotation Jan 02 00:07:14 crc kubenswrapper[4766]: I0102 00:07:14.873142 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 02 00:07:14 crc kubenswrapper[4766]: I0102 00:07:14.873422 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:14 crc kubenswrapper[4766]: I0102 00:07:14.874815 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:14 crc kubenswrapper[4766]: I0102 00:07:14.874922 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:14 crc kubenswrapper[4766]: I0102 00:07:14.874947 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:15 crc kubenswrapper[4766]: E0102 00:07:15.610333 4766 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 02 00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.120112 4766 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 02 00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.120260 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 02 00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.337533 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.337734 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.339728 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.339825 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.339870 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.344333 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.626048 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.631102 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.631161 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 
00:07:17 crc kubenswrapper[4766]: I0102 00:07:17.631180 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:18 crc kubenswrapper[4766]: I0102 00:07:18.469165 4766 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 02 00:07:18 crc kubenswrapper[4766]: E0102 00:07:18.480925 4766 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Jan 02 00:07:18 crc kubenswrapper[4766]: W0102 00:07:18.613492 4766 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 02 00:07:18 crc kubenswrapper[4766]: I0102 00:07:18.613630 4766 trace.go:236] Trace[424394310]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Jan-2026 00:07:08.611) (total time: 10001ms): Jan 02 00:07:18 crc kubenswrapper[4766]: Trace[424394310]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (00:07:18.613) Jan 02 00:07:18 crc kubenswrapper[4766]: Trace[424394310]: [10.001606464s] [10.001606464s] END Jan 02 00:07:18 crc kubenswrapper[4766]: E0102 00:07:18.613660 4766 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 02 00:07:18 crc kubenswrapper[4766]: W0102 00:07:18.682161 4766 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 02 00:07:18 crc kubenswrapper[4766]: I0102 00:07:18.682285 4766 trace.go:236] Trace[85980718]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Jan-2026 00:07:08.680) (total time: 10001ms): Jan 02 00:07:18 crc kubenswrapper[4766]: Trace[85980718]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (00:07:18.682) Jan 02 00:07:18 crc kubenswrapper[4766]: Trace[85980718]: [10.001798567s] [10.001798567s] END Jan 02 00:07:18 crc kubenswrapper[4766]: E0102 00:07:18.682316 4766 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 02 00:07:18 crc kubenswrapper[4766]: E0102 00:07:18.723046 4766 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake 
timeout" node="crc" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.043927 4766 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.044026 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.053487 4766 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.053590 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.594048 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.594359 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.596022 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.596083 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.596101 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.629980 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.631594 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.632578 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.632644 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.632663 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:19 crc kubenswrapper[4766]: I0102 00:07:19.649768 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.338332 4766 patch_prober.go:28] interesting 
pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.338392 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.368580 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.368789 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.370215 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.370251 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.370264 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.374198 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.633534 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.633633 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.634766 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.634869 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.634900 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.634920 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.634938 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:20 crc kubenswrapper[4766]: I0102 00:07:20.634958 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:21 crc kubenswrapper[4766]: I0102 00:07:21.923711 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:21 crc kubenswrapper[4766]: I0102 00:07:21.925369 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:21 crc 
kubenswrapper[4766]: I0102 00:07:21.925435 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:21 crc kubenswrapper[4766]: I0102 00:07:21.925447 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:21 crc kubenswrapper[4766]: I0102 00:07:21.925486 4766 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 02 00:07:21 crc kubenswrapper[4766]: E0102 00:07:21.929666 4766 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Jan 02 00:07:23 crc kubenswrapper[4766]: I0102 00:07:23.012628 4766 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.051115 4766 trace.go:236] Trace[1343471281]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Jan-2026 00:07:09.920) (total time: 14130ms): Jan 02 00:07:24 crc kubenswrapper[4766]: Trace[1343471281]: ---"Objects listed" error: 14130ms (00:07:24.051) Jan 02 00:07:24 crc kubenswrapper[4766]: Trace[1343471281]: [14.130539007s] [14.130539007s] END Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.052043 4766 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.052124 4766 trace.go:236] Trace[290530381]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Jan-2026 00:07:09.081) (total time: 14970ms): Jan 02 00:07:24 crc kubenswrapper[4766]: Trace[290530381]: ---"Objects listed" error: 14970ms (00:07:24.051) Jan 02 00:07:24 crc kubenswrapper[4766]: Trace[290530381]: [14.970218095s] [14.970218095s] END Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.052172 4766 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.052357 4766 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.062337 4766 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.084088 4766 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:42512->192.168.126.11:17697: read: connection reset by peer" start-of-body= Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.084173 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:42512->192.168.126.11:17697: read: connection reset by peer" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.084788 4766 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" 
start-of-body= Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.086185 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.090573 4766 csr.go:261] certificate signing request csr-snt2p is approved, waiting to be issued Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.101294 4766 csr.go:257] certificate signing request csr-snt2p is issued Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.483173 4766 apiserver.go:52] "Watching apiserver" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.486607 4766 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.487037 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.487509 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.487622 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.487552 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.487762 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.487912 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.487873 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.487946 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.487993 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.488146 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.490900 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.491612 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.491970 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.492341 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.492427 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.492629 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.492748 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.493223 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.494098 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.518621 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.532293 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.549165 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.561221 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.573932 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.574653 4766 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.593077 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.608271 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.656970 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657058 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657083 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657116 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657147 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657169 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657196 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 02 00:07:24 crc 
kubenswrapper[4766]: I0102 00:07:24.657220 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657242 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657263 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657281 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657301 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657321 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657342 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657363 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657383 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657368 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: 
"57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657406 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657439 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657465 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657486 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657503 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657510 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657511 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657521 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657610 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657636 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657642 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657656 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657717 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657726 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657748 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657774 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657799 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657800 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657836 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657895 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657921 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657954 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657970 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.657986 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658011 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658032 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658056 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658079 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658047 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658100 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658117 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658172 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658219 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658246 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658261 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658297 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658351 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658376 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658398 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658426 4766 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658447 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658473 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658510 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658544 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658569 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658595 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658624 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658655 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658687 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 
00:07:24.658721 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658755 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658797 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658819 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658861 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658887 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658911 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658935 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658959 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658984 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: 
\"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659010 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659033 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659057 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659078 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659099 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659158 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659189 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659220 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659246 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659272 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658625 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658675 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658687 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659401 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659430 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659460 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659483 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659507 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659546 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod 
\"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659581 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659639 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659675 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659713 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659747 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659774 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659798 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659824 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659907 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659932 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659954 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659976 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660001 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660027 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660055 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660083 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660108 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660133 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660157 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660182 4766 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660204 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660231 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660255 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660277 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660299 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660323 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660345 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660368 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660391 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660414 4766 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660439 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660465 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660489 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660515 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660542 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660566 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660590 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660613 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660635 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 02 00:07:24 crc kubenswrapper[4766]: 
I0102 00:07:24.660660 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660684 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660710 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660736 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660759 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660783 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660809 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660832 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660883 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660914 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 02 00:07:24 crc 
kubenswrapper[4766]: I0102 00:07:24.660944 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660974 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661004 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661035 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661067 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661096 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661128 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661188 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661223 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661255 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 02 00:07:24 crc kubenswrapper[4766]: 
I0102 00:07:24.661286 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661321 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661354 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661389 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661422 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661456 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661491 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661521 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661559 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661593 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " 
Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661627 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661659 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661691 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661722 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661757 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661789 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661823 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662190 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662230 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662263 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 02 00:07:24 crc 
kubenswrapper[4766]: I0102 00:07:24.662298 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662330 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662361 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662396 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662432 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662469 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662505 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662540 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662576 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662608 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: 
\"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662642 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662679 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662712 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662746 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662780 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662814 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662876 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662912 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662948 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662982 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663016 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663049 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663085 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663119 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663153 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663192 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663227 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663260 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663302 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663347 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod 
\"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663386 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663419 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663454 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663481 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663546 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663576 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663604 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663632 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663660 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: 
\"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663688 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663715 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663740 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663764 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663792 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663820 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663901 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663929 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663953 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664036 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664052 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664066 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664081 4766 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664097 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664111 4766 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664124 4766 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664139 4766 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664154 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664168 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664182 4766 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664197 4766 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664214 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664980 4766 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658740 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.658959 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659182 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659220 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659589 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.659601 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660024 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660128 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660218 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660331 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660402 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660469 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660512 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.660772 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661609 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.661783 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662058 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662110 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662372 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662467 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662616 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662831 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.662867 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663033 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663057 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663179 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663278 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663316 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663443 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663501 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663572 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663708 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.663960 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664148 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664245 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664389 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664489 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664621 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.664926 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.665244 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.665440 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.665579 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.665752 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.665893 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.665917 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). 
InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.666343 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.666644 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.666789 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.666861 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.666880 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.666990 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.667177 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.667310 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.667380 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.667700 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.667753 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.667959 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.668147 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.668337 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.668643 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.668780 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.669387 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.669940 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.669948 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.670001 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.670424 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.670459 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.670561 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.670779 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.671155 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.671215 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.671536 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.671668 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.671792 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.672320 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.672655 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.673042 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.673316 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.673571 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.673679 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:07:25.17365402 +0000 UTC m=+19.972141184 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.675133 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.675612 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.676074 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.676367 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.676818 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.677311 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.677600 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.679058 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.679230 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.680187 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.680254 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.680342 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.680641 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.680939 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.680954 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.681460 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). 
InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.681673 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.681748 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.682048 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.682110 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.682171 4766 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.682240 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:25.182216903 +0000 UTC m=+19.980704257 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.683283 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.683348 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.683586 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.683731 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.683923 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.683976 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.684304 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.682247 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.684379 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.684500 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.684518 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.684592 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.684906 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.684991 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.685091 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.685152 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.685456 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.685563 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.686231 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.686599 4766 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.686676 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:25.18664758 +0000 UTC m=+19.985134934 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.687295 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.689157 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.693268 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.694372 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.695249 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.695472 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.696070 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.696382 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.696444 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.696524 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.696774 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.697009 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.697523 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.698412 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.698482 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.699179 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.699593 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.699953 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.700065 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.700294 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.700501 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.700897 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.701135 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.701341 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.701551 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.701651 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.701808 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.701888 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.702060 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.702200 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.702360 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.702731 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.702822 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.703666 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.707836 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.708083 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.709986 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.710022 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.710230 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.710295 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.710292 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.710559 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.710594 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.710605 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.710633 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.710802 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.710993 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.710998 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.711226 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.711271 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.711397 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.711423 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.711439 4766 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.711511 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:25.211488475 +0000 UTC m=+20.009975629 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.711570 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.711581 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.711605 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.711617 4766 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.711628 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.711675 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:25.21165153 +0000 UTC m=+20.010138684 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.711679 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.711694 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.711769 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.712182 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.712699 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.712726 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.712882 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.713029 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.709799 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.713130 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.716374 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.717142 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.717830 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.718072 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.718524 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.718926 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.720273 4766 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498" exitCode=255 Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.720311 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498"} Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.728654 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.730963 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.737392 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.737580 4766 scope.go:117] "RemoveContainer" containerID="236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.737883 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.745543 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.750718 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.759942 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.762604 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.764890 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.765053 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.765663 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766036 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766091 4766 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766105 4766 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766114 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766123 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766169 4766 
reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766179 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766188 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766196 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766220 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766245 4766 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766254 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766301 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766311 4766 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766319 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766327 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766335 4766 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766343 4766 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766351 4766 
reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766358 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766369 4766 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766380 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766389 4766 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766399 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766409 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766420 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766436 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766445 4766 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766453 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766460 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766468 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766476 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" 
(UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766484 4766 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766493 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766515 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766523 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766531 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766539 4766 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766547 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766555 4766 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766562 4766 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766577 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766589 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766685 4766 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766698 4766 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766706 4766 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766714 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766721 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766738 4766 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766759 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766767 4766 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766775 4766 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766783 4766 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766791 4766 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766801 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766810 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766818 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766830 4766 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766837 4766 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766864 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766880 4766 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766891 4766 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766899 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766906 4766 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766914 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766922 4766 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766929 4766 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766942 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766949 4766 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766957 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766964 4766 
reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766972 4766 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766982 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.766993 4766 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767001 4766 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767008 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767016 4766 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767029 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767037 4766 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767073 4766 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767120 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767136 4766 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767146 4766 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767154 4766 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767170 4766 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767177 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767186 4766 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767194 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767201 4766 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767209 4766 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767238 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767250 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767257 4766 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767265 4766 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767272 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767342 4766 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 
00:07:24.767351 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767362 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767369 4766 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767377 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767388 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767471 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767482 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767490 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767499 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767507 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767568 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767577 4766 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767585 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767593 
4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767601 4766 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767608 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767615 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767623 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767687 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767696 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767704 4766 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767712 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767720 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767729 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767754 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767764 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc 
kubenswrapper[4766]: I0102 00:07:24.767772 4766 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767779 4766 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767787 4766 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767799 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767807 4766 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767816 4766 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767828 4766 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767852 4766 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767863 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767946 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767960 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767968 4766 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767976 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767985 4766 
reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.767995 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768002 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768019 4766 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768027 4766 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768035 4766 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768045 4766 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768052 4766 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768060 4766 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768068 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768076 4766 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768083 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768093 4766 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 
00:07:24.768101 4766 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768108 4766 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768117 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768125 4766 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768137 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768145 4766 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768155 4766 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768162 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768177 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768185 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768201 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768209 4766 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768216 4766 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768231 4766 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768239 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768246 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768254 4766 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768261 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768268 4766 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768279 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768333 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768341 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768348 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768356 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768363 4766 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768371 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768378 4766 
reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768399 4766 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768406 4766 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768414 4766 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768423 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768433 4766 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768441 4766 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.768448 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.775018 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.786295 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.804891 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.805213 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.812438 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.819624 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.829841 4766 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 02 00:07:24 crc kubenswrapper[4766]: container &Container{Name:network-operator,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,Command:[/bin/bash -c #!/bin/bash Jan 02 00:07:24 crc kubenswrapper[4766]: set -o allexport Jan 02 00:07:24 crc kubenswrapper[4766]: if [[ -f /etc/kubernetes/apiserver-url.env ]]; then Jan 02 00:07:24 crc kubenswrapper[4766]: source /etc/kubernetes/apiserver-url.env Jan 02 00:07:24 crc kubenswrapper[4766]: else Jan 02 00:07:24 crc kubenswrapper[4766]: echo "Error: /etc/kubernetes/apiserver-url.env is missing" Jan 02 00:07:24 crc kubenswrapper[4766]: exit 1 Jan 02 00:07:24 crc kubenswrapper[4766]: fi Jan 02 00:07:24 crc kubenswrapper[4766]: exec /usr/bin/cluster-network-operator start --listen=0.0.0.0:9104 Jan 02 00:07:24 crc kubenswrapper[4766]: 
],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:cno,HostPort:9104,ContainerPort:9104,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:RELEASE_VERSION,Value:4.18.1,ValueFrom:nil,},EnvVar{Name:KUBE_PROXY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b97554198294bf544fbc116c94a0a1fb2ec8a4de0e926bf9d9e320135f0bee6f,ValueFrom:nil,},EnvVar{Name:KUBE_RBAC_PROXY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09,ValueFrom:nil,},EnvVar{Name:MULTUS_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26,ValueFrom:nil,},EnvVar{Name:MULTUS_ADMISSION_CONTROLLER_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317,ValueFrom:nil,},EnvVar{Name:CNI_PLUGINS_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc,ValueFrom:nil,},EnvVar{Name:BOND_CNI_PLUGIN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78,ValueFrom:nil,},EnvVar{Name:WHEREABOUTS_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4,ValueFrom:nil,},EnvVar{Name:ROUTE_OVERRRIDE_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa,ValueFrom:nil,},EnvVar{Name:MULTUS_NETWORKPOLICY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:23f833d3738d68706eb2f2868bd76bd71cee016cffa6faf5f045a60cc8c6eddd,ValueFrom:nil,},EnvVar{Name:OVN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,ValueFrom:nil,},EnvVar{Name:OVN_NB_RAFT_ELECTION_TIMER,Value:10,ValueFrom:nil,},EnvVar{Name:OVN_SB_RAFT_ELECTION_TIMER,Value:16,ValueFrom:nil,},EnvVar{Name:OVN_NORTHD_PROBE_INTERVAL,Value:10000,ValueFrom:nil,},EnvVar{Name:OVN_CONTROLLER_INACTIVITY_PROBE,Value:180000,ValueFrom:nil,},EnvVar{Name:OVN_NB_INACTIVITY_PROBE,Value:60000,ValueFrom:nil,},EnvVar{Name:EGRESS_ROUTER_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c,ValueFrom:nil,},EnvVar{Name:NETWORK_METRICS_DAEMON_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d,ValueFrom:nil,},EnvVar{Name:NETWORK_CHECK_SOURCE_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:NETWORK_CHECK_TARGET_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:NETWORK_OPERATOR_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:CLOUD_NETWORK_CONFIG_CONTROLLER_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8048f1cb0be521f09749c0a489503cd56d85b68c6ca93380e082cfd693cd97a8,ValueFrom:nil,},EnvVar{Name:CLI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,ValueFrom:nil,},EnvVar{Name:FRR_K8S_IMAGE,Value:quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:5dbf844e49bb46b78586930149e5e5f5dc121014c8afd10fe36f3651967cc256,ValueFrom:nil,},EnvVar{Name:NETWORKING_CONSOLE_PLUGIN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd,ValueFrom:nil,},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host-etc-kube,ReadOnly:true,MountPath:/etc/kubernetes,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-tls,ReadOnly:false,MountPath:/var/run/secrets/serving-cert,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rdwmf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-operator-58b4c7f79c-55gtf_openshift-network-operator(37a5e44f-9a88-4405-be8a-b645485e7312): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Jan 02 00:07:24 crc kubenswrapper[4766]: > logger="UnhandledError" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.831268 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"network-operator\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" podUID="37a5e44f-9a88-4405-be8a-b645485e7312" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.846973 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:iptables-alerter,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,Command:[/iptables-alerter/iptables-alerter.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONTAINER_RUNTIME_ENDPOINT,Value:unix:///run/crio/crio.sock,ValueFrom:nil,},EnvVar{Name:ALERTER_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{68157440 0} {} 65Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:iptables-alerter-script,ReadOnly:false,MountPath:/iptables-alerter,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:host-slash,ReadOnly:true,MountPath:/host,SubPath:,MountPropagation:*HostToContainer,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rczfb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod iptables-alerter-4ln5h_openshift-network-operator(d75a4c96-2883-4a0b-bab2-0fab2b6c0b49): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars" logger="UnhandledError" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.847609 4766 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 02 00:07:24 crc kubenswrapper[4766]: container &Container{Name:webhook,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe Jan 02 00:07:24 crc kubenswrapper[4766]: if [[ -f "/env/_master" ]]; then Jan 02 00:07:24 crc kubenswrapper[4766]: set -o allexport Jan 02 00:07:24 crc kubenswrapper[4766]: source "/env/_master" Jan 02 00:07:24 crc kubenswrapper[4766]: set +o allexport Jan 02 00:07:24 crc kubenswrapper[4766]: fi Jan 02 00:07:24 crc kubenswrapper[4766]: # OVN-K will try to remove hybrid overlay node annotations even when the hybrid overlay is not enabled. 
Jan 02 00:07:24 crc kubenswrapper[4766]: # https://github.com/ovn-org/ovn-kubernetes/blob/ac6820df0b338a246f10f412cd5ec903bd234694/go-controller/pkg/ovn/master.go#L791 Jan 02 00:07:24 crc kubenswrapper[4766]: ho_enable="--enable-hybrid-overlay" Jan 02 00:07:24 crc kubenswrapper[4766]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start webhook" Jan 02 00:07:24 crc kubenswrapper[4766]: # extra-allowed-user: service account `ovn-kubernetes-control-plane` Jan 02 00:07:24 crc kubenswrapper[4766]: # sets pod annotations in multi-homing layer3 network controller (cluster-manager) Jan 02 00:07:24 crc kubenswrapper[4766]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \ Jan 02 00:07:24 crc kubenswrapper[4766]: --webhook-cert-dir="/etc/webhook-cert" \ Jan 02 00:07:24 crc kubenswrapper[4766]: --webhook-host=127.0.0.1 \ Jan 02 00:07:24 crc kubenswrapper[4766]: --webhook-port=9743 \ Jan 02 00:07:24 crc kubenswrapper[4766]: ${ho_enable} \ Jan 02 00:07:24 crc kubenswrapper[4766]: --enable-interconnect \ Jan 02 00:07:24 crc kubenswrapper[4766]: --disable-approver \ Jan 02 00:07:24 crc kubenswrapper[4766]: --extra-allowed-user="system:serviceaccount:openshift-ovn-kubernetes:ovn-kubernetes-control-plane" \ Jan 02 00:07:24 crc kubenswrapper[4766]: --wait-for-kubernetes-api=200s \ Jan 02 00:07:24 crc kubenswrapper[4766]: --pod-admission-conditions="/var/run/ovnkube-identity-config/additional-pod-admission-cond.json" \ Jan 02 00:07:24 crc kubenswrapper[4766]: --loglevel="${LOGLEVEL}" Jan 02 00:07:24 crc kubenswrapper[4766]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:2,ValueFrom:nil,},EnvVar{Name:KUBERNETES_NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/etc/webhook-cert/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct 
envvars Jan 02 00:07:24 crc kubenswrapper[4766]: > logger="UnhandledError" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.849270 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"iptables-alerter\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-network-operator/iptables-alerter-4ln5h" podUID="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.850701 4766 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 02 00:07:24 crc kubenswrapper[4766]: container &Container{Name:approver,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe Jan 02 00:07:24 crc kubenswrapper[4766]: if [[ -f "/env/_master" ]]; then Jan 02 00:07:24 crc kubenswrapper[4766]: set -o allexport Jan 02 00:07:24 crc kubenswrapper[4766]: source "/env/_master" Jan 02 00:07:24 crc kubenswrapper[4766]: set +o allexport Jan 02 00:07:24 crc kubenswrapper[4766]: fi Jan 02 00:07:24 crc kubenswrapper[4766]: Jan 02 00:07:24 crc kubenswrapper[4766]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start approver" Jan 02 00:07:24 crc kubenswrapper[4766]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \ Jan 02 00:07:24 crc kubenswrapper[4766]: --disable-webhook \ Jan 02 00:07:24 crc kubenswrapper[4766]: --csr-acceptance-conditions="/var/run/ovnkube-identity-config/additional-cert-acceptance-cond.json" \ Jan 02 00:07:24 crc kubenswrapper[4766]: --loglevel="${LOGLEVEL}" Jan 02 00:07:24 crc kubenswrapper[4766]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:4,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Jan 02 00:07:24 crc kubenswrapper[4766]: > logger="UnhandledError" Jan 02 00:07:24 crc kubenswrapper[4766]: E0102 00:07:24.851956 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to 
\"StartContainer\" for \"webhook\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\", failed to \"StartContainer\" for \"approver\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"]" pod="openshift-network-node-identity/network-node-identity-vrzqb" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" Jan 02 00:07:24 crc kubenswrapper[4766]: I0102 00:07:24.903782 4766 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.102922 4766 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-02 00:02:24 +0000 UTC, rotation deadline is 2026-10-02 18:10:18.149431025 +0000 UTC Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.103707 4766 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6570h2m53.045728564s for next certificate rotation Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.272728 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.272825 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.272948 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:07:26.27292148 +0000 UTC m=+21.071408634 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.272979 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.273007 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.273029 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273119 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273188 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273208 4766 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273218 4766 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273265 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:26.273256061 +0000 UTC m=+21.071743215 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273284 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:26.273276571 +0000 UTC m=+21.071763715 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273154 4766 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273340 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273364 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:26.273331763 +0000 UTC m=+21.071819077 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273368 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273388 4766 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.273453 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:26.273433775 +0000 UTC m=+21.071920929 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.394244 4766 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.394676 4766 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Service ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.394744 4766 reflector.go:484] object-"openshift-network-operator"/"iptables-alerter-script": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"iptables-alerter-script": Unexpected watch close - watch lasted less than a second and no items received Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.394837 4766 reflector.go:484] object-"openshift-network-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.394892 4766 reflector.go:484] object-"openshift-network-node-identity"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.394926 4766 reflector.go:484] object-"openshift-network-node-identity"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.394944 4766 reflector.go:484] object-"openshift-network-node-identity"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.394960 4766 reflector.go:484] object-"openshift-network-node-identity"/"network-node-identity-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-network-node-identity"/"network-node-identity-cert": Unexpected watch close - watch lasted less than a second and no items received Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.394982 4766 reflector.go:484] object-"openshift-network-node-identity"/"ovnkube-identity-cm": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"ovnkube-identity-cm": Unexpected watch close - watch lasted less than a second and no items received Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.394997 4766 reflector.go:484] object-"openshift-network-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: 
object-"openshift-network-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.395031 4766 reflector.go:484] object-"openshift-network-operator"/"metrics-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-network-operator"/"metrics-tls": Unexpected watch close - watch lasted less than a second and no items received Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.420786 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-mdmsr"] Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.421306 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-mdmsr" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.422211 4766 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.424011 4766 reflector.go:561] object-"openshift-dns"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.424056 4766 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.424898 4766 reflector.go:561] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": failed to list *v1.Secret: secrets "node-resolver-dockercfg-kz9s7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.424966 4766 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"node-resolver-dockercfg-kz9s7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"node-resolver-dockercfg-kz9s7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 02 00:07:25 crc kubenswrapper[4766]: W0102 00:07:25.426009 4766 reflector.go:561] object-"openshift-dns"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Jan 02 00:07:25 crc kubenswrapper[4766]: E0102 00:07:25.426036 4766 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 02 00:07:25 crc 
kubenswrapper[4766]: I0102 00:07:25.480591 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.505406 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready 
status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"20
26-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.522724 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.538202 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.538835 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.540294 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.541072 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.542306 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.542943 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.543705 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.544755 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.545392 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.546340 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.546799 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.550511 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.551514 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.552255 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.553955 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.554616 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.555832 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.556350 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.557090 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.558603 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.559205 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.560431 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.561000 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" 
path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.562301 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.562807 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.563584 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.565017 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.565604 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.566838 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.567447 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.568523 4766 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.568647 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.570784 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.572021 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.572540 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.574391 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.575392 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.576541 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.577377 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.578043 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc-hosts-file\") pod \"node-resolver-mdmsr\" (UID: \"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\") " pod="openshift-dns/node-resolver-mdmsr" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.578255 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kfs6\" (UniqueName: \"kubernetes.io/projected/7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc-kube-api-access-6kfs6\") pod \"node-resolver-mdmsr\" (UID: \"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\") " pod="openshift-dns/node-resolver-mdmsr" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.578733 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.579386 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.580810 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.581517 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.582741 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.583290 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.584687 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.585568 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.587033 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" 
path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.587659 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.588722 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.589271 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.590366 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.590636 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.591360 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.591974 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.622672 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.681279 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc-hosts-file\") pod \"node-resolver-mdmsr\" (UID: \"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\") " pod="openshift-dns/node-resolver-mdmsr" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.681329 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kfs6\" (UniqueName: \"kubernetes.io/projected/7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc-kube-api-access-6kfs6\") pod \"node-resolver-mdmsr\" (UID: \"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\") " pod="openshift-dns/node-resolver-mdmsr" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.681515 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc-hosts-file\") pod \"node-resolver-mdmsr\" (UID: \"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\") " pod="openshift-dns/node-resolver-mdmsr" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.696965 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.725136 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.726928 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e"} Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.727228 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.728018 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.728065 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"338fe76e4376137fae0102ce8d92a4be240ba7a3eccc5e9de4487f294a9be7d9"} Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.732900 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"7c50f12ec9c17ddb2b7b60baf86366da50e0daa725b41adce0f9573dd4161c29"} Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.736572 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"91dcfa1549bdcb4f0c68c7b918ce3879b50afe914d5c03cbcbede26b900f4672"} Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.772091 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.812443 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.825379 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-lmc2p"] Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.825715 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.826178 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-hlsf9"] Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.826499 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.829665 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-s78r2"] Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.830234 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.830305 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.830246 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.830468 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.830478 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.830633 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.830639 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.830888 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.830996 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.831129 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.834187 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.834242 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.834250 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.834418 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.855057 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.871290 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.887252 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.901970 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.921420 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.937234 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.950504 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984237 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-tuning-conf-dir\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984287 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-var-lib-cni-bin\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984329 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-hostroot\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984348 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-system-cni-dir\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984365 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-var-lib-cni-multus\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984392 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-os-release\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984408 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/adc58583-1116-4066-9ca8-da659b367839-cni-binary-copy\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " 
pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984423 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/adc58583-1116-4066-9ca8-da659b367839-multus-daemon-config\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984443 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-cni-binary-copy\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984459 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-run-k8s-cni-cncf-io\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984477 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvrt6\" (UniqueName: \"kubernetes.io/projected/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-kube-api-access-bvrt6\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984494 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-multus-cni-dir\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984509 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-cnibin\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984526 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmj8l\" (UniqueName: \"kubernetes.io/projected/adc58583-1116-4066-9ca8-da659b367839-kube-api-access-hmj8l\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984542 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-etc-kubernetes\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984567 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-system-cni-dir\") pod 
\"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984585 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-rootfs\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984604 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-cnibin\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984623 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984642 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-os-release\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984659 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-multus-socket-dir-parent\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984682 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-multus-conf-dir\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984703 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqnpm\" (UniqueName: \"kubernetes.io/projected/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-kube-api-access-sqnpm\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984738 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-mcd-auth-proxy-config\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:25 crc kubenswrapper[4766]: 
I0102 00:07:25.984760 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-run-multus-certs\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984780 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-proxy-tls\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984807 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-run-netns\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.984826 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-var-lib-kubelet\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:25 crc kubenswrapper[4766]: I0102 00:07:25.988973 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.007753 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.024119 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.041423 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.056870 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.070626 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.084954 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085234 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-cni-binary-copy\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085276 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-run-k8s-cni-cncf-io\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085302 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvrt6\" (UniqueName: \"kubernetes.io/projected/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-kube-api-access-bvrt6\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085326 4766 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-multus-cni-dir\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085348 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-cnibin\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085364 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmj8l\" (UniqueName: \"kubernetes.io/projected/adc58583-1116-4066-9ca8-da659b367839-kube-api-access-hmj8l\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085385 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-etc-kubernetes\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085418 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-system-cni-dir\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085441 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-rootfs\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085465 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-cnibin\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085487 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085511 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-os-release\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085532 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-multus-socket-dir-parent\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085568 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-multus-conf-dir\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085592 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqnpm\" (UniqueName: \"kubernetes.io/projected/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-kube-api-access-sqnpm\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085628 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-mcd-auth-proxy-config\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085653 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-run-multus-certs\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085675 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-proxy-tls\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085702 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-run-netns\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085723 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-var-lib-kubelet\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085749 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-tuning-conf-dir\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085774 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" 
(UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-var-lib-cni-bin\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085796 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-hostroot\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085816 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-system-cni-dir\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085861 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-var-lib-cni-multus\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085894 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-os-release\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085917 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/adc58583-1116-4066-9ca8-da659b367839-cni-binary-copy\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.085939 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/adc58583-1116-4066-9ca8-da659b367839-multus-daemon-config\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.086196 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-cni-binary-copy\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.086294 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-multus-conf-dir\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.086353 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-run-k8s-cni-cncf-io\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 
00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.086638 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-multus-cni-dir\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.086687 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-cnibin\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.086711 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/adc58583-1116-4066-9ca8-da659b367839-multus-daemon-config\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.086865 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-etc-kubernetes\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.086910 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-system-cni-dir\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.086939 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-rootfs\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.086966 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-cnibin\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087207 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-var-lib-cni-bin\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087314 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-run-multus-certs\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087413 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-run-netns\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087490 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087486 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-var-lib-cni-multus\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087541 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-hostroot\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087580 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-mcd-auth-proxy-config\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087534 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-os-release\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087580 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-system-cni-dir\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087693 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-host-var-lib-kubelet\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087691 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-os-release\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087386 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/adc58583-1116-4066-9ca8-da659b367839-multus-socket-dir-parent\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " 
pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.087978 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/adc58583-1116-4066-9ca8-da659b367839-cni-binary-copy\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.088064 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-tuning-conf-dir\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.096059 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-proxy-tls\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.105701 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.123436 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvrt6\" (UniqueName: \"kubernetes.io/projected/a62872f4-c5c6-4f43-ba8a-40e0c96e6968-kube-api-access-bvrt6\") pod \"machine-config-daemon-hlsf9\" (UID: \"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\") " pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.123461 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmj8l\" (UniqueName: \"kubernetes.io/projected/adc58583-1116-4066-9ca8-da659b367839-kube-api-access-hmj8l\") pod \"multus-lmc2p\" (UID: \"adc58583-1116-4066-9ca8-da659b367839\") " pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.125258 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqnpm\" (UniqueName: \"kubernetes.io/projected/7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae-kube-api-access-sqnpm\") pod \"multus-additional-cni-plugins-s78r2\" (UID: \"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\") " pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.127258 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when 
the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.152233 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.170296 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-lmc2p" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.182471 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.195295 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-s78r2" Jan 02 00:07:26 crc kubenswrapper[4766]: W0102 00:07:26.196710 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda62872f4_c5c6_4f43_ba8a_40e0c96e6968.slice/crio-2c89f5e39166477c8c0752658b63fdcbd665853be4225a7a00dc7af849d274ae WatchSource:0}: Error finding container 2c89f5e39166477c8c0752658b63fdcbd665853be4225a7a00dc7af849d274ae: Status 404 returned error can't find the container with id 2c89f5e39166477c8c0752658b63fdcbd665853be4225a7a00dc7af849d274ae Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.205340 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 02 00:07:26 crc kubenswrapper[4766]: W0102 00:07:26.212158 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e2dfc33_3d7d_4542_82d0_91c64b3cf0ae.slice/crio-a7639dc7ddae8c48218de59b4467989ce1fe0950e1e0eeaf8348c5eb21751261 WatchSource:0}: Error finding container a7639dc7ddae8c48218de59b4467989ce1fe0950e1e0eeaf8348c5eb21751261: Status 404 returned error can't find the container with id a7639dc7ddae8c48218de59b4467989ce1fe0950e1e0eeaf8348c5eb21751261 Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.214087 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xps4h"] Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.214826 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.219220 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.219262 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.219380 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.219544 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.219616 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.220440 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.221107 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.223687 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.224015 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.242020 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.270792 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.279605 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.287209 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.287310 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.287346 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.287366 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.287388 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: 
\"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.287502 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.287521 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.287532 4766 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.287577 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:28.287561875 +0000 UTC m=+23.086049029 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.287742 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:07:28.287709509 +0000 UTC m=+23.086196663 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.287746 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.287876 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.287886 4766 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.287897 4766 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.287775 4766 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.287941 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:28.287929315 +0000 UTC m=+23.086416529 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.288062 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:28.288049578 +0000 UTC m=+23.086536832 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.288075 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-02 00:07:28.288068599 +0000 UTC m=+23.086555873 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.317660 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for 
pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.350358 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.350428 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.361768 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.384766 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388252 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-openvswitch\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388289 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-log-socket\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388314 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-ovn\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388330 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-netd\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388353 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-kubelet\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388367 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-systemd\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388383 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-config\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388406 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-env-overrides\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388448 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-systemd-units\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388478 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388501 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-script-lib\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388518 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-netns\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388536 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-ovn-kubernetes\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388563 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbsns\" (UniqueName: \"kubernetes.io/projected/66b7fd44-a7ca-46a0-a585-cb37baccc49e-kube-api-access-sbsns\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388579 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-var-lib-openvswitch\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc 
kubenswrapper[4766]: I0102 00:07:26.388593 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovn-node-metrics-cert\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388607 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-node-log\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388622 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-bin\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388639 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-slash\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.388654 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-etc-openvswitch\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.423079 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.429567 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.434131 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.467838 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.490878 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-env-overrides\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.490934 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-systemd-units\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 
00:07:26.490952 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.490971 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-script-lib\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491034 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491062 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-systemd-units\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491097 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-netns\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491077 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-netns\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491159 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-ovn-kubernetes\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491198 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-var-lib-openvswitch\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491215 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovn-node-metrics-cert\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc 
kubenswrapper[4766]: I0102 00:07:26.491231 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbsns\" (UniqueName: \"kubernetes.io/projected/66b7fd44-a7ca-46a0-a585-cb37baccc49e-kube-api-access-sbsns\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491251 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-node-log\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491268 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-bin\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491303 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-slash\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491334 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-etc-openvswitch\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491359 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-openvswitch\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491374 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-log-socket\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491404 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-ovn\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491428 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-netd\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491448 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-kubelet\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491462 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-systemd\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491479 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-config\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491464 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-node-log\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491540 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-log-socket\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491586 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-netd\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491584 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-ovn\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491487 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-bin\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491634 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-systemd\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491635 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-slash\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491596 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-etc-openvswitch\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491546 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-ovn-kubernetes\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491638 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-kubelet\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491665 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-openvswitch\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491516 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-env-overrides\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.491692 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-var-lib-openvswitch\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.492302 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-config\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.492382 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-script-lib\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.496540 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovn-node-metrics-cert\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.500805 4766 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.507977 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbsns\" (UniqueName: \"kubernetes.io/projected/66b7fd44-a7ca-46a0-a585-cb37baccc49e-kube-api-access-sbsns\") pod \"ovnkube-node-xps4h\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.518973 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.533233 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.533328 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.533372 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.533334 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.533525 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:26 crc kubenswrapper[4766]: E0102 00:07:26.533581 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.546190 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.549684 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: W0102 00:07:26.560146 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66b7fd44_a7ca_46a0_a585_cb37baccc49e.slice/crio-3a4095793733b3ef5eb750899e9f104983770b40d96ac0c571b92c2fbfed474a WatchSource:0}: Error finding container 3a4095793733b3ef5eb750899e9f104983770b40d96ac0c571b92c2fbfed474a: Status 404 returned error can't find 
the container with id 3a4095793733b3ef5eb750899e9f104983770b40d96ac0c571b92c2fbfed474a Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.571246 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/
dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.577121 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.703548 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.740469 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" event={"ID":"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae","Type":"ContainerStarted","Data":"def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.740561 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" event={"ID":"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae","Type":"ContainerStarted","Data":"a7639dc7ddae8c48218de59b4467989ce1fe0950e1e0eeaf8348c5eb21751261"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.744687 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.750712 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerStarted","Data":"3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.750813 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerStarted","Data":"b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.750833 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerStarted","Data":"2c89f5e39166477c8c0752658b63fdcbd665853be4225a7a00dc7af849d274ae"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 
00:07:26.753729 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lmc2p" event={"ID":"adc58583-1116-4066-9ca8-da659b367839","Type":"ContainerStarted","Data":"f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.753804 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lmc2p" event={"ID":"adc58583-1116-4066-9ca8-da659b367839","Type":"ContainerStarted","Data":"8e839240790fc53eda36810236b1da7ae2c9038bdadae94d01c44dbb9977b042"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.756159 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.756203 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.758147 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.759886 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f" exitCode=0 Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.759934 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.760082 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.760153 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"3a4095793733b3ef5eb750899e9f104983770b40d96ac0c571b92c2fbfed474a"} Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.773525 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.788803 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.809229 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.827806 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.844538 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.870422 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.899014 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.904027 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.911621 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kfs6\" (UniqueName: \"kubernetes.io/projected/7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc-kube-api-access-6kfs6\") pod \"node-resolver-mdmsr\" (UID: \"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\") " pod="openshift-dns/node-resolver-mdmsr" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.918943 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.919262 4766 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.930177 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with 
unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.943086 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.956758 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.969344 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.980190 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-mdmsr" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.982516 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:26 crc kubenswrapper[4766]: I0102 00:07:26.998662 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\
\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.022686 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.043872 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.060910 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.074831 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.087790 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.101749 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.113540 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.124472 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.138583 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.341717 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.353862 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.354655 4766 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.358890 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.372490 4766 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.385441 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.402940 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.410909 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-56zlf"] Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.411283 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-56zlf" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.414490 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.414724 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.414878 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.415201 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.416982 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.443207 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.458767 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.482340 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.505473 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.511881 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/795ac7da-eca3-4b11-8a9c-308150aa9164-serviceca\") pod \"node-ca-56zlf\" (UID: \"795ac7da-eca3-4b11-8a9c-308150aa9164\") " pod="openshift-image-registry/node-ca-56zlf" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.511925 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwznf\" (UniqueName: \"kubernetes.io/projected/795ac7da-eca3-4b11-8a9c-308150aa9164-kube-api-access-lwznf\") pod \"node-ca-56zlf\" (UID: \"795ac7da-eca3-4b11-8a9c-308150aa9164\") " pod="openshift-image-registry/node-ca-56zlf" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.511980 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/795ac7da-eca3-4b11-8a9c-308150aa9164-host\") pod \"node-ca-56zlf\" (UID: \"795ac7da-eca3-4b11-8a9c-308150aa9164\") " pod="openshift-image-registry/node-ca-56zlf" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.520460 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.538709 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.552648 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.579580 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.601042 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.612882 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/795ac7da-eca3-4b11-8a9c-308150aa9164-host\") pod \"node-ca-56zlf\" (UID: \"795ac7da-eca3-4b11-8a9c-308150aa9164\") " pod="openshift-image-registry/node-ca-56zlf" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.612928 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/795ac7da-eca3-4b11-8a9c-308150aa9164-serviceca\") pod \"node-ca-56zlf\" (UID: \"795ac7da-eca3-4b11-8a9c-308150aa9164\") " pod="openshift-image-registry/node-ca-56zlf" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.612946 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwznf\" (UniqueName: \"kubernetes.io/projected/795ac7da-eca3-4b11-8a9c-308150aa9164-kube-api-access-lwznf\") pod \"node-ca-56zlf\" (UID: \"795ac7da-eca3-4b11-8a9c-308150aa9164\") " pod="openshift-image-registry/node-ca-56zlf" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.613013 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/795ac7da-eca3-4b11-8a9c-308150aa9164-host\") pod \"node-ca-56zlf\" (UID: \"795ac7da-eca3-4b11-8a9c-308150aa9164\") " pod="openshift-image-registry/node-ca-56zlf" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.614286 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/795ac7da-eca3-4b11-8a9c-308150aa9164-serviceca\") pod \"node-ca-56zlf\" (UID: \"795ac7da-eca3-4b11-8a9c-308150aa9164\") " pod="openshift-image-registry/node-ca-56zlf" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.617974 4766 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\
\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.630944 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwznf\" (UniqueName: \"kubernetes.io/projected/795ac7da-eca3-4b11-8a9c-308150aa9164-kube-api-access-lwznf\") pod \"node-ca-56zlf\" (UID: \"795ac7da-eca3-4b11-8a9c-308150aa9164\") " pod="openshift-image-registry/node-ca-56zlf" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.637085 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z 
is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.650414 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.676962 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.694683 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.708486 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.722615 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.726926 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-56zlf" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.737428 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: W0102 00:07:27.739771 4766 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod795ac7da_eca3_4b11_8a9c_308150aa9164.slice/crio-2e776becdd49efa9b86fdb191c8a3e6ecbdb51bc1c0c359a41bba7f2533430b3 WatchSource:0}: Error finding container 2e776becdd49efa9b86fdb191c8a3e6ecbdb51bc1c0c359a41bba7f2533430b3: Status 404 returned error can't find the container with id 2e776becdd49efa9b86fdb191c8a3e6ecbdb51bc1c0c359a41bba7f2533430b3 Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.779912 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.787444 4766 generic.go:334] "Generic (PLEG): container finished" podID="7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae" containerID="def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd" exitCode=0 Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.787638 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" 
event={"ID":"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae","Type":"ContainerDied","Data":"def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd"} Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.790427 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mdmsr" event={"ID":"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc","Type":"ContainerStarted","Data":"6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151"} Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.790464 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mdmsr" event={"ID":"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc","Type":"ContainerStarted","Data":"f2e9647893d0e9aa0f10763fccb7bac811c34b5883cf0c53a1db8700ba079915"} Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.800152 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55"} Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.800222 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca"} Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.800236 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50"} Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.800249 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e"} Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.800264 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d"} Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.801665 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-56zlf" event={"ID":"795ac7da-eca3-4b11-8a9c-308150aa9164","Type":"ContainerStarted","Data":"2e776becdd49efa9b86fdb191c8a3e6ecbdb51bc1c0c359a41bba7f2533430b3"} Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.818379 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.857305 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.895619 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\
\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.934011 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:27 crc kubenswrapper[4766]: I0102 00:07:27.976060 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.054240 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.068376 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.092853 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.167378 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.198457 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.240479 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.258092 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.295025 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.335195 4766 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.335644 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.335806 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: 
\"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.335889 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.335929 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.335959 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336035 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336079 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336098 4766 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336117 4766 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336152 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:32.33612913 +0000 UTC m=+27.134616284 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336192 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:32.336170231 +0000 UTC m=+27.134657385 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336213 4766 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336253 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:32.336240933 +0000 UTC m=+27.134728297 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336417 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336488 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336544 4766 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336647 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:32.336629524 +0000 UTC m=+27.135116678 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.336765 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:07:32.336756939 +0000 UTC m=+27.135244093 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.337972 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.338026 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.338042 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.338112 4766 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.341301 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc 
kubenswrapper[4766]: I0102 00:07:28.391226 4766 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.391691 4766 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.393505 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.393575 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.393592 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.393624 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.393638 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:28Z","lastTransitionTime":"2026-01-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.418262 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.424161 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.424961 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.424998 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.425012 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.425073 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.425089 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:28Z","lastTransitionTime":"2026-01-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.439660 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.443285 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.443325 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.443333 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.443350 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.443359 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:28Z","lastTransitionTime":"2026-01-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.460811 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.461122 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.468369 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.468409 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.468418 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.468436 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.468447 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:28Z","lastTransitionTime":"2026-01-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.481642 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.486297 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.486405 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.486418 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.486468 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.486495 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:28Z","lastTransitionTime":"2026-01-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.491813 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.498638 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.498809 4766 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.503093 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.503130 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.503139 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.503152 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.503162 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:28Z","lastTransitionTime":"2026-01-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.533378 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.533472 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.533533 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.533631 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.533774 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:28 crc kubenswrapper[4766]: E0102 00:07:28.533890 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.606835 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.607287 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.607298 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.607324 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.607342 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:28Z","lastTransitionTime":"2026-01-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.710078 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.710109 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.710118 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.710131 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.710140 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:28Z","lastTransitionTime":"2026-01-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.809365 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4"} Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.811447 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.811493 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.811502 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.811524 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.811535 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:28Z","lastTransitionTime":"2026-01-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.811952 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" event={"ID":"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae","Type":"ContainerStarted","Data":"ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af"} Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.813454 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-56zlf" event={"ID":"795ac7da-eca3-4b11-8a9c-308150aa9164","Type":"ContainerStarted","Data":"e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344"} Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.833283 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.849039 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.862174 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\
\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.878670 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.889461 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.907990 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.918558 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.918605 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.918617 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.918634 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.918644 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:28Z","lastTransitionTime":"2026-01-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.930554 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z 
is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.943778 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.955943 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.969008 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.981507 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:28 crc kubenswrapper[4766]: I0102 00:07:28.994065 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.012901 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.020832 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.020923 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.020938 4766 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.020965 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.021056 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:29Z","lastTransitionTime":"2026-01-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.050453 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.096210 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.126132 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.126184 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.126197 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.126215 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.126227 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:29Z","lastTransitionTime":"2026-01-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.135364 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.175136 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.224504 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z 
is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.228905 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.228943 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.228953 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.228966 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.228976 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:29Z","lastTransitionTime":"2026-01-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.252349 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.295472 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.331901 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.331952 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.331966 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.331984 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.331999 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:29Z","lastTransitionTime":"2026-01-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.334478 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.375702 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.409980 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.434174 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.434226 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.434240 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.434257 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.434272 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:29Z","lastTransitionTime":"2026-01-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.453395 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.491912 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.534016 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.537016 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.537066 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.537083 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.537108 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.537122 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:29Z","lastTransitionTime":"2026-01-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.573951 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.616142 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.640323 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.640357 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.640368 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.640383 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.640394 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:29Z","lastTransitionTime":"2026-01-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.743431 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.743502 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.743517 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.743540 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.743555 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:29Z","lastTransitionTime":"2026-01-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.819933 4766 generic.go:334] "Generic (PLEG): container finished" podID="7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae" containerID="ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af" exitCode=0 Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.819973 4766 generic.go:334] "Generic (PLEG): container finished" podID="7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae" containerID="8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53" exitCode=0 Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.820018 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" event={"ID":"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae","Type":"ContainerDied","Data":"ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.820050 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" event={"ID":"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae","Type":"ContainerDied","Data":"8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.824229 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.833602 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\
\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.849163 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.849219 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.849232 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.849251 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.849265 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:29Z","lastTransitionTime":"2026-01-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.855703 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.871410 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.885383 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.899149 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.915334 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.931224 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni 
whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.953090 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.953123 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.953131 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.953145 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.953154 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:29Z","lastTransitionTime":"2026-01-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.959611 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6cca
d920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:29 crc kubenswrapper[4766]: I0102 00:07:29.974475 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.015600 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.055184 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.055220 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.055230 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.055245 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.055256 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:30Z","lastTransitionTime":"2026-01-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.061077 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.096181 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.134511 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.157557 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.157583 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.157592 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.157605 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.157613 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:30Z","lastTransitionTime":"2026-01-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.173647 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.215972 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.256379 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.260398 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.260435 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.260447 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.260463 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.260474 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:30Z","lastTransitionTime":"2026-01-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.299227 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.333131 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.362599 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.362643 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.362659 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.362679 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.362694 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:30Z","lastTransitionTime":"2026-01-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.377002 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.437069 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebf
fcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.457912 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.464661 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.464727 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.464747 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.464772 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.464789 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:30Z","lastTransitionTime":"2026-01-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.500127 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.533826 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:30 crc kubenswrapper[4766]: E0102 00:07:30.534017 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.534144 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:30 crc kubenswrapper[4766]: E0102 00:07:30.534332 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.533864 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:30 crc kubenswrapper[4766]: E0102 00:07:30.534620 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.539641 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z 
is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.566505 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.566664 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.566744 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.566887 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.567010 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:30Z","lastTransitionTime":"2026-01-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.572593 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.614009 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.654721 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.669744 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.670045 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.670159 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.670243 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.670327 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:30Z","lastTransitionTime":"2026-01-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.691597 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.734460 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.773652 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.773742 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.773757 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.773775 4766 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.773790 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:30Z","lastTransitionTime":"2026-01-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.832365 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77"} Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.835411 4766 generic.go:334] "Generic (PLEG): container finished" podID="7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae" containerID="625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b" exitCode=0 Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.835471 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" event={"ID":"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae","Type":"ContainerDied","Data":"625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b"} Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.862196 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.876260 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.876317 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.876336 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.876361 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.876378 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:30Z","lastTransitionTime":"2026-01-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.880091 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.900645 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.912920 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.931294 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.973118 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.982275 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.982338 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.982355 4766 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.982743 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:30 crc kubenswrapper[4766]: I0102 00:07:30.982798 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:30Z","lastTransitionTime":"2026-01-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.013935 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.052362 4766 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.085381 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.085424 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.085437 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.085452 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.085463 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:31Z","lastTransitionTime":"2026-01-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.092945 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.136734 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.177288 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.188369 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.188413 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.188424 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.188440 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.188452 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:31Z","lastTransitionTime":"2026-01-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.220642 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.262675 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.290878 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.290951 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.290974 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.291002 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.291023 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:31Z","lastTransitionTime":"2026-01-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.302454 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6cca
d920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.394167 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.394201 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.394209 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.394222 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.394232 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:31Z","lastTransitionTime":"2026-01-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.496238 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.496314 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.496333 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.496355 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.496372 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:31Z","lastTransitionTime":"2026-01-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.599531 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.599767 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.599784 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.599800 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.599812 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:31Z","lastTransitionTime":"2026-01-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.702932 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.702981 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.702994 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.703011 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.703022 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:31Z","lastTransitionTime":"2026-01-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.806527 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.806581 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.806593 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.806612 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.806623 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:31Z","lastTransitionTime":"2026-01-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.843543 4766 generic.go:334] "Generic (PLEG): container finished" podID="7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae" containerID="e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9" exitCode=0 Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.843598 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" event={"ID":"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae","Type":"ContainerDied","Data":"e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9"} Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.865105 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.882576 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.901341 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.934681 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z 
is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.945091 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.945193 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.945220 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.945256 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.945280 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:31Z","lastTransitionTime":"2026-01-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.953442 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.968184 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:31 crc kubenswrapper[4766]: I0102 00:07:31.993164 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.007822 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.022817 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.037041 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.048713 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.048764 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.048779 4766 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.048797 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.048810 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:32Z","lastTransitionTime":"2026-01-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.052415 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.063587 4766 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.073691 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.086768 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.150625 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.150654 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.150664 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.150680 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.150690 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:32Z","lastTransitionTime":"2026-01-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.253151 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.253202 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.253218 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.253240 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.253303 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:32Z","lastTransitionTime":"2026-01-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.360643 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.360701 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.360717 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.360738 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.360753 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:32Z","lastTransitionTime":"2026-01-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.377341 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.377521 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.377578 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.377618 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.377663 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.377702 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.377743 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.377761 4766 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.377818 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.377880 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.377901 4766 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.378042 4766 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.377825 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:40.377803517 +0000 UTC m=+35.176290671 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.378219 4766 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.378241 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:40.378227169 +0000 UTC m=+35.176714453 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.378261 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:07:40.378251319 +0000 UTC m=+35.176738593 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.378278 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:40.37826936 +0000 UTC m=+35.176756634 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.378294 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:40.37828604 +0000 UTC m=+35.176773354 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.463378 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.463420 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.463431 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.463450 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.463463 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:32Z","lastTransitionTime":"2026-01-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.533472 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.533532 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.533657 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.533874 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.534065 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:32 crc kubenswrapper[4766]: E0102 00:07:32.534335 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.567626 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.568480 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.568593 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.568634 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.568662 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:32Z","lastTransitionTime":"2026-01-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.670834 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.670918 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.670930 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.670949 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.670962 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:32Z","lastTransitionTime":"2026-01-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.773113 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.773704 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.773733 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.773764 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.773787 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:32Z","lastTransitionTime":"2026-01-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.848954 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.849302 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.853507 4766 generic.go:334] "Generic (PLEG): container finished" podID="7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae" containerID="a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6" exitCode=0 Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.853554 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" event={"ID":"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae","Type":"ContainerDied","Data":"a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.862767 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.877541 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.878813 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.878871 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.878884 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.878901 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.878916 4766 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:32Z","lastTransitionTime":"2026-01-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.885744 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.891065 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.912381 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\
"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.930633 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.948989 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.964177 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.974697 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.981524 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.981585 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.981596 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.981649 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.981663 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:32Z","lastTransitionTime":"2026-01-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:32 crc kubenswrapper[4766]: I0102 00:07:32.987059 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.001651 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.018784 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.032160 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.046819 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.057573 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.074008 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.084809 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.084863 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.084876 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.084891 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.084902 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:33Z","lastTransitionTime":"2026-01-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.091900 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.112835 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.126332 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.136175 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.147150 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.156715 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.170292 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.184174 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.194100 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.194146 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.194157 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.194173 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.194184 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:33Z","lastTransitionTime":"2026-01-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.198376 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\"
,\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.215135 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.231047 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.247676 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.265164 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f
2c7d584ad1efca40e0aa34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.296646 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.296685 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.296693 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.296709 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.296718 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:33Z","lastTransitionTime":"2026-01-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.399665 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.399713 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.399722 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.399735 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.399744 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:33Z","lastTransitionTime":"2026-01-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.502460 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.502488 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.502496 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.502508 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.502517 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:33Z","lastTransitionTime":"2026-01-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.604685 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.604747 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.604791 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.604875 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.604919 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:33Z","lastTransitionTime":"2026-01-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.707779 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.707893 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.707912 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.707936 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.707957 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:33Z","lastTransitionTime":"2026-01-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.810962 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.811011 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.811020 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.811037 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.811046 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:33Z","lastTransitionTime":"2026-01-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.860614 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" event={"ID":"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae","Type":"ContainerStarted","Data":"baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511"} Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.860718 4766 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.861215 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.881437 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.897474 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.913495 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.913819 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.914031 4766 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.914188 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.914297 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:33Z","lastTransitionTime":"2026-01-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.917712 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.936446 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.952826 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 
02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.958267 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"1
92.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.976662 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\
\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:33 crc kubenswrapper[4766]: I0102 00:07:33.992126 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.014400 4766 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.035377 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.062129 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.062214 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:34 crc 
kubenswrapper[4766]: I0102 00:07:34.062242 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.062275 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.062299 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:34Z","lastTransitionTime":"2026-01-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.088927 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f
2c7d584ad1efca40e0aa34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.107592 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.127491 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.148706 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.166161 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.166226 4766 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.166244 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.166270 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.166288 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:34Z","lastTransitionTime":"2026-01-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.167581 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":tr
ue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.182047 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.202737 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.218726 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.239478 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.272102 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.272255 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.272309 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.272329 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.272353 4766 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.272371 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:34Z","lastTransitionTime":"2026-01-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.291232 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay
.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.308199 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.324201 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.339977 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.353036 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.371134 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.374994 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.375081 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.375107 4766 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.375141 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.375167 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:34Z","lastTransitionTime":"2026-01-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.389531 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.404207 4766 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.417779 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.432370 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.478394 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.478443 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.478459 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.478480 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.478495 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:34Z","lastTransitionTime":"2026-01-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.533281 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.533328 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.533417 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:34 crc kubenswrapper[4766]: E0102 00:07:34.533473 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:34 crc kubenswrapper[4766]: E0102 00:07:34.533601 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:34 crc kubenswrapper[4766]: E0102 00:07:34.533707 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.583328 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.583532 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.583551 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.583596 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.583612 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:34Z","lastTransitionTime":"2026-01-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.686878 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.686931 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.686948 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.686969 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.686984 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:34Z","lastTransitionTime":"2026-01-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.789312 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.789632 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.789734 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.789834 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.789978 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:34Z","lastTransitionTime":"2026-01-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.866061 4766 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.892964 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.893012 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.893028 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.893050 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:34 crc kubenswrapper[4766]: I0102 00:07:34.893068 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:34Z","lastTransitionTime":"2026-01-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.026819 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.026910 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.026934 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.026959 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.026980 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:35Z","lastTransitionTime":"2026-01-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.129333 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.129375 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.129386 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.129403 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.129412 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:35Z","lastTransitionTime":"2026-01-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.232235 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.232275 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.232289 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.232310 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.232327 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:35Z","lastTransitionTime":"2026-01-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.335110 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.335160 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.335170 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.335183 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.335191 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:35Z","lastTransitionTime":"2026-01-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.437601 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.437650 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.437666 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.437688 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.437705 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:35Z","lastTransitionTime":"2026-01-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.541378 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.541446 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.541460 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.541483 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.541503 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:35Z","lastTransitionTime":"2026-01-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.567458 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.584273 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.599015 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.612476 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.628427 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.642487 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.643375 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.643414 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.643423 4766 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.643439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.643448 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:35Z","lastTransitionTime":"2026-01-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.654043 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.665137 4766 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.679749 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.694216 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.707618 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.741065 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.747113 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.747152 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.747162 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.747180 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.747193 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:35Z","lastTransitionTime":"2026-01-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.765932 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.798479 4766 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"k
ube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.850929 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.851386 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.851399 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 
00:07:35.851420 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.851433 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:35Z","lastTransitionTime":"2026-01-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.868316 4766 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.954661 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.954715 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.954723 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.954738 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:35 crc kubenswrapper[4766]: I0102 00:07:35.954748 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:35Z","lastTransitionTime":"2026-01-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.057367 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.057410 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.057422 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.057443 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.057459 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:36Z","lastTransitionTime":"2026-01-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.160283 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.160358 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.160379 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.160402 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.160420 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:36Z","lastTransitionTime":"2026-01-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.263372 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.263433 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.263444 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.263460 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.263475 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:36Z","lastTransitionTime":"2026-01-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.367077 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.367145 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.367163 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.367188 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.367205 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:36Z","lastTransitionTime":"2026-01-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.469884 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.469930 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.469980 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.470004 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.470020 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:36Z","lastTransitionTime":"2026-01-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.533937 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.534051 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.533966 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:36 crc kubenswrapper[4766]: E0102 00:07:36.534104 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:36 crc kubenswrapper[4766]: E0102 00:07:36.534232 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:36 crc kubenswrapper[4766]: E0102 00:07:36.534387 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.572091 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.572140 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.572152 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.572169 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.572184 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:36Z","lastTransitionTime":"2026-01-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.674601 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.674663 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.674684 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.674706 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.674721 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:36Z","lastTransitionTime":"2026-01-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.778492 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.778570 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.778595 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.778619 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.778638 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:36Z","lastTransitionTime":"2026-01-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.872753 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/0.log" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.876411 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec" exitCode=1 Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.876453 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.877059 4766 scope.go:117] "RemoveContainer" containerID="bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.881234 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.881299 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.881327 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.881356 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.881378 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:36Z","lastTransitionTime":"2026-01-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.891140 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.905735 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.917212 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.937303 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.952774 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.974191 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.983695 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.983771 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:36 crc 
kubenswrapper[4766]: I0102 00:07:36.983791 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.983820 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.983840 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:36Z","lastTransitionTime":"2026-01-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:36 crc kubenswrapper[4766]: I0102 00:07:36.999146 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f
2c7d584ad1efca40e0aa34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:36Z\\\",\\\"message\\\":\\\"6.008355 6022 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0102 00:07:36.008359 6022 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0102 00:07:36.008373 6022 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0102 00:07:36.008379 6022 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0102 00:07:36.008384 6022 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0102 00:07:36.008390 6022 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0102 00:07:36.008399 6022 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0102 00:07:36.008482 6022 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0102 00:07:36.008520 6022 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0102 00:07:36.009150 6022 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0102 00:07:36.009225 6022 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0102 00:07:36.009275 6022 factory.go:656] Stopping watch factory\\\\nI0102 00:07:36.009304 6022 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0102 00:07:36.009319 6022 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.020107 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4b
a8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.039874 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.058538 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.069927 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.084273 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.086508 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.086558 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.086580 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.086603 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.086620 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:37Z","lastTransitionTime":"2026-01-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.108280 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.122218 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.125809 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.145476 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.166528 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.183606 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.193307 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.193376 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.193443 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.193460 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.193470 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:37Z","lastTransitionTime":"2026-01-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.199268 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.215757 4766 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.229187 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.255937 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:36Z\\\",\\\"message\\\":\\\"6.008355 6022 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0102 00:07:36.008359 6022 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0102 00:07:36.008373 6022 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0102 00:07:36.008379 6022 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0102 00:07:36.008384 6022 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0102 00:07:36.008390 6022 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0102 00:07:36.008399 6022 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0102 00:07:36.008482 6022 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0102 00:07:36.008520 6022 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0102 00:07:36.009150 6022 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0102 00:07:36.009225 6022 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0102 00:07:36.009275 6022 factory.go:656] Stopping watch factory\\\\nI0102 00:07:36.009304 6022 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0102 00:07:36.009319 6022 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.272068 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4b
a8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.288975 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.296784 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.296820 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.296831 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.296865 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.296876 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:37Z","lastTransitionTime":"2026-01-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.307926 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.328434 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.345231 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.359658 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.369380 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.399202 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.399248 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.399258 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.399272 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.399282 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:37Z","lastTransitionTime":"2026-01-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.502184 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.502256 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.502271 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.502298 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.502318 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:37Z","lastTransitionTime":"2026-01-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.605450 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.605544 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.605559 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.605581 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.605598 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:37Z","lastTransitionTime":"2026-01-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.708422 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.708457 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.708471 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.708488 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.708501 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:37Z","lastTransitionTime":"2026-01-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.810804 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.810855 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.810864 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.810879 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.810888 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:37Z","lastTransitionTime":"2026-01-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.881930 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/0.log" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.884685 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e"} Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.884839 4766 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.899744 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.914716 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.914789 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.914800 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.914817 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.914828 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:37Z","lastTransitionTime":"2026-01-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.916393 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.929745 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.941900 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.953510 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.971325 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689
899568a2201b019960c4bd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:36Z\\\",\\\"message\\\":\\\"6.008355 6022 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0102 00:07:36.008359 6022 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0102 00:07:36.008373 6022 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0102 00:07:36.008379 6022 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0102 00:07:36.008384 6022 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0102 00:07:36.008390 6022 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0102 00:07:36.008399 6022 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0102 00:07:36.008482 6022 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0102 00:07:36.008520 6022 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0102 00:07:36.009150 6022 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0102 00:07:36.009225 6022 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0102 00:07:36.009275 6022 factory.go:656] Stopping watch factory\\\\nI0102 00:07:36.009304 6022 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0102 00:07:36.009319 6022 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.983057 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:37 crc kubenswrapper[4766]: I0102 00:07:37.996497 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.011568 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.017378 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.017413 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc 
kubenswrapper[4766]: I0102 00:07:38.017422 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.017439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.017448 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.023223 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 
02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.040549 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs
\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.054532 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.073330 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.085508 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.120950 4766 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.121008 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.121023 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.121043 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.121056 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.223692 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.223749 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.223766 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.223788 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.223805 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.328422 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.328501 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.328515 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.328534 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.328549 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.431768 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.431809 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.431819 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.431836 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.431874 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.533089 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.533108 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:38 crc kubenswrapper[4766]: E0102 00:07:38.533451 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:38 crc kubenswrapper[4766]: E0102 00:07:38.533523 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.533109 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:38 crc kubenswrapper[4766]: E0102 00:07:38.533638 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.535434 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.535464 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.535475 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.535489 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.535501 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.637754 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.637820 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.637898 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.637926 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.637943 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.741102 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.741148 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.741165 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.741204 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.741221 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.844894 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.844949 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.844964 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.844986 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.845001 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.852705 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.852756 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.852774 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.852797 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.852815 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: E0102 00:07:38.873979 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.878672 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.878712 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.878723 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.878741 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.878752 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.890082 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/1.log" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.891371 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/0.log" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.896240 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e" exitCode=1 Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.896327 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.896440 4766 scope.go:117] "RemoveContainer" containerID="bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.897945 4766 scope.go:117] "RemoveContainer" containerID="c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e" Jan 02 00:07:38 crc kubenswrapper[4766]: E0102 00:07:38.898294 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" Jan 02 00:07:38 crc kubenswrapper[4766]: E0102 00:07:38.899828 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.906085 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.906336 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.907009 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.907177 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.907315 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.916953 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: E0102 00:07:38.924367 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.929640 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.929958 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.930061 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.930177 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.930264 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.937520 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: E0102 00:07:38.943550 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.947302 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.947334 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.947346 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.947363 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.947374 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.950652 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.965175 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: E0102 00:07:38.965280 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: E0102 00:07:38.966226 4766 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.967876 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.967932 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.967950 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.968051 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.968166 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:38Z","lastTransitionTime":"2026-01-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.978821 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:38 crc kubenswrapper[4766]: I0102 00:07:38.988169 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:38Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.002073 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.022044 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.039243 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.060981 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.070518 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.070571 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.070585 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.070604 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.070619 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:39Z","lastTransitionTime":"2026-01-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.080471 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.095366 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.114690 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689
899568a2201b019960c4bd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:36Z\\\",\\\"message\\\":\\\"6.008355 6022 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0102 00:07:36.008359 6022 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0102 00:07:36.008373 6022 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0102 00:07:36.008379 6022 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0102 00:07:36.008384 6022 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0102 00:07:36.008390 6022 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0102 00:07:36.008399 6022 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0102 00:07:36.008482 6022 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0102 00:07:36.008520 6022 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0102 00:07:36.009150 6022 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0102 00:07:36.009225 6022 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0102 00:07:36.009275 6022 factory.go:656] Stopping watch factory\\\\nI0102 00:07:36.009304 6022 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0102 00:07:36.009319 6022 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"0:07:38.152963 6176 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI0102 00:07:38.152969 6176 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0102 00:07:38.152972 6176 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0102 00:07:38.153001 6176 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0102 00:07:38.153040 6176 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} 
was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initCo
ntainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.127537 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.173381 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.173417 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.173429 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.173444 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.173457 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:39Z","lastTransitionTime":"2026-01-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.277656 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.277708 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.277726 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.277748 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.277765 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:39Z","lastTransitionTime":"2026-01-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.278150 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799"] Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.278717 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.283417 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.283904 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.296714 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.308929 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.321365 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller
-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.340748 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.358761 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.379111 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xn7v\" (UniqueName: 
\"kubernetes.io/projected/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-kube-api-access-8xn7v\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.379206 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-env-overrides\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.379272 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.379323 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.379623 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.380603 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.380643 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.380654 4766 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.380672 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.380684 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:39Z","lastTransitionTime":"2026-01-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.393377 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.407694 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.420612 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.432718 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.449590 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.468350 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.480346 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.480395 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.480430 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xn7v\" (UniqueName: \"kubernetes.io/projected/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-kube-api-access-8xn7v\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.480476 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-env-overrides\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.481090 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-env-overrides\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.481593 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.483345 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.483421 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.483444 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.483481 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.483503 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:39Z","lastTransitionTime":"2026-01-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.493664 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.499212 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{
\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mou
ntPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:36Z\\\",\\\"message\\\":\\\"6.008355 6022 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0102 00:07:36.008359 6022 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0102 00:07:36.008373 6022 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0102 00:07:36.008379 6022 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0102 00:07:36.008384 6022 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0102 00:07:36.008390 6022 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0102 00:07:36.008399 6022 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0102 00:07:36.008482 6022 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0102 00:07:36.008520 6022 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0102 00:07:36.009150 6022 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0102 00:07:36.009225 6022 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0102 00:07:36.009275 6022 factory.go:656] Stopping watch factory\\\\nI0102 00:07:36.009304 6022 handler.go:208] Removed *v1.NetworkPolicy 
event handler 4\\\\nI0102 00:07:36.009319 6022 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"0:07:38.152963 6176 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI0102 00:07:38.152969 6176 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0102 00:07:38.152972 6176 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0102 00:07:38.153001 6176 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0102 00:07:38.153040 6176 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.509121 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xn7v\" (UniqueName: \"kubernetes.io/projected/9c79dbb1-c0a5-4c74-be7c-65e8823fc29a-kube-api-access-8xn7v\") pod \"ovnkube-control-plane-749d76644c-95799\" (UID: \"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.520898 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.534417 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.586212 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.586265 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.586281 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.586300 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.586316 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:39Z","lastTransitionTime":"2026-01-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.598730 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" Jan 02 00:07:39 crc kubenswrapper[4766]: W0102 00:07:39.612171 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c79dbb1_c0a5_4c74_be7c_65e8823fc29a.slice/crio-708f684911062233bcc0f1d22ba63e5f2dfb9b66113ca3c762c65d5da163bfba WatchSource:0}: Error finding container 708f684911062233bcc0f1d22ba63e5f2dfb9b66113ca3c762c65d5da163bfba: Status 404 returned error can't find the container with id 708f684911062233bcc0f1d22ba63e5f2dfb9b66113ca3c762c65d5da163bfba Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.691771 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.691809 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.691831 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.691860 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.691874 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:39Z","lastTransitionTime":"2026-01-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.794699 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.794868 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.794905 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.794940 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.794964 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:39Z","lastTransitionTime":"2026-01-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.896838 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.896930 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.896948 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.896972 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.896990 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:39Z","lastTransitionTime":"2026-01-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.899408 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" event={"ID":"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a","Type":"ContainerStarted","Data":"8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d"} Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.899478 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" event={"ID":"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a","Type":"ContainerStarted","Data":"708f684911062233bcc0f1d22ba63e5f2dfb9b66113ca3c762c65d5da163bfba"} Jan 02 00:07:39 crc kubenswrapper[4766]: I0102 00:07:39.902359 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/1.log" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:39.999999 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.000069 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.000091 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.000124 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.000146 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:40Z","lastTransitionTime":"2026-01-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.103086 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.103173 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.103199 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.103232 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.103257 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:40Z","lastTransitionTime":"2026-01-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.205992 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.206043 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.206055 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.206076 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.206089 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:40Z","lastTransitionTime":"2026-01-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.307996 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.308051 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.308066 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.308087 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.308100 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:40Z","lastTransitionTime":"2026-01-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.391049 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.391200 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391227 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:07:56.39120664 +0000 UTC m=+51.189693804 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.391271 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.391342 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391401 4766 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391487 4766 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391523 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391551 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert 
podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:56.391537259 +0000 UTC m=+51.190024423 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391551 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391552 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391575 4766 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391593 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391616 4766 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391624 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:56.391610441 +0000 UTC m=+51.190097605 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391650 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:56.391637892 +0000 UTC m=+51.190125066 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.391715 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:56.391660643 +0000 UTC m=+51.190147837 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.391408 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.411819 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.411931 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.411963 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.411993 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.412020 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:40Z","lastTransitionTime":"2026-01-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.423532 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-zd2t8"] Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.424397 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.424533 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.439807 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.457454 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.473564 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.493165 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.493242 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dj9l\" (UniqueName: \"kubernetes.io/projected/f869b527-abf2-400f-90c2-ebee595564a2-kube-api-access-5dj9l\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.493577 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.514322 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.516240 4766 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.516306 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.516326 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.516351 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.516371 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:40Z","lastTransitionTime":"2026-01-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.533051 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.533154 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.533355 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.533159 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.533470 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.533543 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network 
not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.534180 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.548253 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.571495 4766 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.591666 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.594068 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.594122 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dj9l\" (UniqueName: \"kubernetes.io/projected/f869b527-abf2-400f-90c2-ebee595564a2-kube-api-access-5dj9l\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.594264 4766 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:40 crc kubenswrapper[4766]: E0102 00:07:40.594381 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs podName:f869b527-abf2-400f-90c2-ebee595564a2 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:41.094349123 +0000 UTC m=+35.892836307 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs") pod "network-metrics-daemon-zd2t8" (UID: "f869b527-abf2-400f-90c2-ebee595564a2") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.611304 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.617667 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dj9l\" (UniqueName: \"kubernetes.io/projected/f869b527-abf2-400f-90c2-ebee595564a2-kube-api-access-5dj9l\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.619419 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.619463 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.619481 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.619504 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.619522 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:40Z","lastTransitionTime":"2026-01-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.633697 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.655107 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.689892 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689
899568a2201b019960c4bd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:36Z\\\",\\\"message\\\":\\\"6.008355 6022 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0102 00:07:36.008359 6022 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0102 00:07:36.008373 6022 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0102 00:07:36.008379 6022 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0102 00:07:36.008384 6022 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0102 00:07:36.008390 6022 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0102 00:07:36.008399 6022 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0102 00:07:36.008482 6022 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0102 00:07:36.008520 6022 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0102 00:07:36.009150 6022 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0102 00:07:36.009225 6022 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0102 00:07:36.009275 6022 factory.go:656] Stopping watch factory\\\\nI0102 00:07:36.009304 6022 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0102 00:07:36.009319 6022 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"0:07:38.152963 6176 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI0102 00:07:38.152969 6176 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0102 00:07:38.152972 6176 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0102 00:07:38.153001 6176 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0102 00:07:38.153040 6176 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} 
was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initCo
ntainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.712708 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.722480 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.722544 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.722563 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.722591 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.722609 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:40Z","lastTransitionTime":"2026-01-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.736165 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.758742 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.825559 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.825612 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.825626 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.825643 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.825655 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:40Z","lastTransitionTime":"2026-01-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.913189 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" event={"ID":"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a","Type":"ContainerStarted","Data":"459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed"} Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.928471 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.928553 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.928579 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.928617 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.928641 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:40Z","lastTransitionTime":"2026-01-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.935216 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.953798 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.975303 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:40 crc kubenswrapper[4766]: I0102 00:07:40.999202 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.019342 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.033413 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.033491 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.033517 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.033549 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.033573 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:41Z","lastTransitionTime":"2026-01-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.035747 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.053020 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.071255 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.088064 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.103159 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:41 crc kubenswrapper[4766]: E0102 00:07:41.103375 4766 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:41 crc kubenswrapper[4766]: E0102 00:07:41.103459 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs podName:f869b527-abf2-400f-90c2-ebee595564a2 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:42.1034268 +0000 UTC m=+36.901913954 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs") pod "network-metrics-daemon-zd2t8" (UID: "f869b527-abf2-400f-90c2-ebee595564a2") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.108182 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.122812 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\
"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.139207 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.139973 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.140036 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.140057 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.140089 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.140108 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:41Z","lastTransitionTime":"2026-01-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.159543 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.181468 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.206513 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.236694 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bed2cc62002023df9ffdfed4aedca97b3dbe160f2c7d584ad1efca40e0aa34ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:36Z\\\",\\\"message\\\":\\\"6.008355 6022 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0102 00:07:36.008359 6022 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0102 00:07:36.008373 6022 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0102 00:07:36.008379 6022 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0102 00:07:36.008384 6022 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0102 00:07:36.008390 6022 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0102 00:07:36.008399 6022 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0102 00:07:36.008482 6022 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0102 00:07:36.008520 6022 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0102 00:07:36.009150 6022 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0102 00:07:36.009225 6022 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0102 00:07:36.009275 6022 factory.go:656] Stopping watch factory\\\\nI0102 00:07:36.009304 6022 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0102 00:07:36.009319 6022 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"0:07:38.152963 6176 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI0102 00:07:38.152969 6176 
metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0102 00:07:38.152972 6176 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0102 00:07:38.153001 6176 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0102 00:07:38.153040 6176 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732
57453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.242802 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.242867 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.242880 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.242898 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.242911 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:41Z","lastTransitionTime":"2026-01-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.345684 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.345715 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.345726 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.345741 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.345752 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:41Z","lastTransitionTime":"2026-01-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.448259 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.448331 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.448349 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.448372 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.448389 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:41Z","lastTransitionTime":"2026-01-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.551185 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.551253 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.551354 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.551382 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.551400 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:41Z","lastTransitionTime":"2026-01-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.654452 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.654603 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.654624 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.654648 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.654667 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:41Z","lastTransitionTime":"2026-01-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.758815 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.758912 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.758930 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.758954 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.758973 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:41Z","lastTransitionTime":"2026-01-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.862987 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.863082 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.863132 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.863159 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.863178 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:41Z","lastTransitionTime":"2026-01-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.966643 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.966700 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.966717 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.966742 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:41 crc kubenswrapper[4766]: I0102 00:07:41.966760 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:41Z","lastTransitionTime":"2026-01-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.070391 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.070462 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.070482 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.070509 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.070527 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:42Z","lastTransitionTime":"2026-01-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.114740 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:42 crc kubenswrapper[4766]: E0102 00:07:42.115025 4766 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:42 crc kubenswrapper[4766]: E0102 00:07:42.115123 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs podName:f869b527-abf2-400f-90c2-ebee595564a2 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:44.115096719 +0000 UTC m=+38.913583913 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs") pod "network-metrics-daemon-zd2t8" (UID: "f869b527-abf2-400f-90c2-ebee595564a2") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.173348 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.173411 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.173428 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.173456 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.173473 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:42Z","lastTransitionTime":"2026-01-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.277112 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.277153 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.277164 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.277184 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.277196 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:42Z","lastTransitionTime":"2026-01-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.379913 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.379967 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.379986 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.380050 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.380070 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:42Z","lastTransitionTime":"2026-01-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.483662 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.483720 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.483739 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.483763 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.483781 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:42Z","lastTransitionTime":"2026-01-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.533919 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.533976 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.533990 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:42 crc kubenswrapper[4766]: E0102 00:07:42.534124 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.534166 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:42 crc kubenswrapper[4766]: E0102 00:07:42.534212 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:42 crc kubenswrapper[4766]: E0102 00:07:42.534328 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:42 crc kubenswrapper[4766]: E0102 00:07:42.534537 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.587428 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.587502 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.587521 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.587548 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.587566 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:42Z","lastTransitionTime":"2026-01-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.691412 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.691481 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.691498 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.691523 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.691539 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:42Z","lastTransitionTime":"2026-01-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.795213 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.795302 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.795322 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.795353 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.795439 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:42Z","lastTransitionTime":"2026-01-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.909916 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.910024 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.910070 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.910106 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:42 crc kubenswrapper[4766]: I0102 00:07:42.910134 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:42Z","lastTransitionTime":"2026-01-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.013097 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.013173 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.013190 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.013219 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.013238 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:43Z","lastTransitionTime":"2026-01-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.116734 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.116801 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.116819 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.116895 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.116919 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:43Z","lastTransitionTime":"2026-01-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.129578 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.131074 4766 scope.go:117] "RemoveContainer" containerID="c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e" Jan 02 00:07:43 crc kubenswrapper[4766]: E0102 00:07:43.131370 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.153441 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.175544 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.200401 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.220706 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.222037 4766 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.222203 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.222263 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.222296 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.222349 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:43Z","lastTransitionTime":"2026-01-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.239045 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.258498 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 
2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.274657 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.294370 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.315337 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.327081 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.327143 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.327161 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.327186 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.327200 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:43Z","lastTransitionTime":"2026-01-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.332735 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.357567 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.384998 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.415044 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.430235 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.430455 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.430561 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.430641 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.430720 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:43Z","lastTransitionTime":"2026-01-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.433583 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.450267 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.475173 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689
899568a2201b019960c4bd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"0:07:38.152963 6176 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI0102 00:07:38.152969 6176 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0102 00:07:38.152972 6176 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0102 00:07:38.153001 6176 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0102 00:07:38.153040 6176 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.533591 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.533671 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.533696 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.533729 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.533756 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:43Z","lastTransitionTime":"2026-01-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.636008 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.636080 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.636095 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.636139 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.636153 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:43Z","lastTransitionTime":"2026-01-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.739007 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.739376 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.739569 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.739796 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.740091 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:43Z","lastTransitionTime":"2026-01-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.843044 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.843364 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.843508 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.843629 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.843737 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:43Z","lastTransitionTime":"2026-01-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.946935 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.947025 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.947075 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.947102 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:43 crc kubenswrapper[4766]: I0102 00:07:43.947119 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:43Z","lastTransitionTime":"2026-01-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.050613 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.050679 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.050699 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.050725 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.050743 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:44Z","lastTransitionTime":"2026-01-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.137805 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:44 crc kubenswrapper[4766]: E0102 00:07:44.138161 4766 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:44 crc kubenswrapper[4766]: E0102 00:07:44.138652 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs podName:f869b527-abf2-400f-90c2-ebee595564a2 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:48.138620282 +0000 UTC m=+42.937107466 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs") pod "network-metrics-daemon-zd2t8" (UID: "f869b527-abf2-400f-90c2-ebee595564a2") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.154547 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.154619 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.154641 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.154674 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.154696 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:44Z","lastTransitionTime":"2026-01-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.257992 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.258080 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.258099 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.258131 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.258149 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:44Z","lastTransitionTime":"2026-01-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.361294 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.361372 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.361402 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.361433 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.361452 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:44Z","lastTransitionTime":"2026-01-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.464593 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.464662 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.464685 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.464715 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.464736 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:44Z","lastTransitionTime":"2026-01-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.533435 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.533468 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.533506 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.533581 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:44 crc kubenswrapper[4766]: E0102 00:07:44.533781 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:44 crc kubenswrapper[4766]: E0102 00:07:44.534370 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:44 crc kubenswrapper[4766]: E0102 00:07:44.534474 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:44 crc kubenswrapper[4766]: E0102 00:07:44.534575 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.566766 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.566838 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.567144 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.567502 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.567532 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:44Z","lastTransitionTime":"2026-01-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.673948 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.674015 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.674034 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.674059 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.674078 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:44Z","lastTransitionTime":"2026-01-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.778077 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.778169 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.778194 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.778231 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.778254 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:44Z","lastTransitionTime":"2026-01-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.881357 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.881421 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.881439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.881462 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.881480 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:44Z","lastTransitionTime":"2026-01-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.984954 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.985009 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.985026 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.985051 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:44 crc kubenswrapper[4766]: I0102 00:07:44.985068 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:44Z","lastTransitionTime":"2026-01-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.088431 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.088505 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.088530 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.088562 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.088583 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:45Z","lastTransitionTime":"2026-01-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.191541 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.191912 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.192142 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.192358 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.192579 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:45Z","lastTransitionTime":"2026-01-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.295736 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.295794 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.295811 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.295835 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.295889 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:45Z","lastTransitionTime":"2026-01-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.398974 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.399045 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.399060 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.399084 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.399098 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:45Z","lastTransitionTime":"2026-01-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.503141 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.503234 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.503534 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.503570 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.503589 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:45Z","lastTransitionTime":"2026-01-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.591275 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.606581 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.606648 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.606671 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.606700 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.606724 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:45Z","lastTransitionTime":"2026-01-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.611672 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.639833 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.673684 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689
899568a2201b019960c4bd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"0:07:38.152963 6176 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI0102 00:07:38.152969 6176 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0102 00:07:38.152972 6176 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0102 00:07:38.153001 6176 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0102 00:07:38.153040 6176 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.692067 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.707326 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.710545 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.710618 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.710638 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.710665 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.710683 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:45Z","lastTransitionTime":"2026-01-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.728740 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.743833 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.760083 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.781026 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.792807 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.806162 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.814092 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.814172 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.814196 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.814227 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.814250 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:45Z","lastTransitionTime":"2026-01-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.820536 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.835273 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.851103 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.867256 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.917080 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.917145 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.917163 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.917188 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:45 crc kubenswrapper[4766]: I0102 00:07:45.917207 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:45Z","lastTransitionTime":"2026-01-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.019836 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.019928 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.019944 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.019969 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.019986 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:46Z","lastTransitionTime":"2026-01-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.123105 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.123170 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.123195 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.123227 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.123251 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:46Z","lastTransitionTime":"2026-01-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.228567 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.228605 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.228617 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.228633 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.228644 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:46Z","lastTransitionTime":"2026-01-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.331339 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.331396 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.331413 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.331432 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.331447 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:46Z","lastTransitionTime":"2026-01-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.434312 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.434358 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.434369 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.434384 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.434395 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:46Z","lastTransitionTime":"2026-01-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.533835 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.533932 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.533938 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.534001 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:46 crc kubenswrapper[4766]: E0102 00:07:46.534142 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:07:46 crc kubenswrapper[4766]: E0102 00:07:46.534220 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:46 crc kubenswrapper[4766]: E0102 00:07:46.534391 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:46 crc kubenswrapper[4766]: E0102 00:07:46.534570 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.537207 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.537258 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.537276 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.537295 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.537310 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:46Z","lastTransitionTime":"2026-01-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.640375 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.640802 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.640826 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.640899 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.640926 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:46Z","lastTransitionTime":"2026-01-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.744504 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.744618 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.744638 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.744671 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.744695 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:46Z","lastTransitionTime":"2026-01-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.847665 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.847711 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.847721 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.847738 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.847749 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:46Z","lastTransitionTime":"2026-01-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.950791 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.950917 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.950941 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.950969 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:46 crc kubenswrapper[4766]: I0102 00:07:46.950988 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:46Z","lastTransitionTime":"2026-01-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.057741 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.057807 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.057825 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.057889 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.057919 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:47Z","lastTransitionTime":"2026-01-02T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.161162 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.161221 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.161239 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.161264 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.161281 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:47Z","lastTransitionTime":"2026-01-02T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.264797 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.264923 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.264935 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.264952 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.264961 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:47Z","lastTransitionTime":"2026-01-02T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.367656 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.367711 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.367724 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.367747 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.367763 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:47Z","lastTransitionTime":"2026-01-02T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.470792 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.470882 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.470904 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.470929 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.470942 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:47Z","lastTransitionTime":"2026-01-02T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.574647 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.574708 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.574725 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.574750 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.574770 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:47Z","lastTransitionTime":"2026-01-02T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.678020 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.678095 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.678115 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.678143 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.678164 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:47Z","lastTransitionTime":"2026-01-02T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.781499 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.781576 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.781597 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.781626 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.781649 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:47Z","lastTransitionTime":"2026-01-02T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.884877 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.884950 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.884970 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.885006 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.885041 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:47Z","lastTransitionTime":"2026-01-02T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.987388 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.987439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.987455 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.987478 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:47 crc kubenswrapper[4766]: I0102 00:07:47.987495 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:47Z","lastTransitionTime":"2026-01-02T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.089774 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.089831 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.089886 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.089913 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.089927 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:48Z","lastTransitionTime":"2026-01-02T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.192635 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.192691 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.192709 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.192770 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.192796 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:48Z","lastTransitionTime":"2026-01-02T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.223952 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:48 crc kubenswrapper[4766]: E0102 00:07:48.224322 4766 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:48 crc kubenswrapper[4766]: E0102 00:07:48.224517 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs podName:f869b527-abf2-400f-90c2-ebee595564a2 nodeName:}" failed. No retries permitted until 2026-01-02 00:07:56.224487334 +0000 UTC m=+51.022974528 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs") pod "network-metrics-daemon-zd2t8" (UID: "f869b527-abf2-400f-90c2-ebee595564a2") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.296221 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.296285 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.296306 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.296335 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.296359 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:48Z","lastTransitionTime":"2026-01-02T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.399340 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.399392 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.399410 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.399434 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.399451 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:48Z","lastTransitionTime":"2026-01-02T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.502434 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.502496 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.502509 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.502530 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.502543 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:48Z","lastTransitionTime":"2026-01-02T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.533462 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.533536 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.533615 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:48 crc kubenswrapper[4766]: E0102 00:07:48.533646 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.533659 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:48 crc kubenswrapper[4766]: E0102 00:07:48.533750 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:48 crc kubenswrapper[4766]: E0102 00:07:48.533978 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:48 crc kubenswrapper[4766]: E0102 00:07:48.534167 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.607295 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.607374 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.607397 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.607431 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.607453 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:48Z","lastTransitionTime":"2026-01-02T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.710649 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.710713 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.710727 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.710764 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.710776 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:48Z","lastTransitionTime":"2026-01-02T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.813653 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.813713 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.813728 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.813749 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.813765 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:48Z","lastTransitionTime":"2026-01-02T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.916263 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.916302 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.916312 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.916326 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:48 crc kubenswrapper[4766]: I0102 00:07:48.916335 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:48Z","lastTransitionTime":"2026-01-02T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.020298 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.020401 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.020427 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.020458 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.020480 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:49Z","lastTransitionTime":"2026-01-02T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.123690 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.123774 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.123803 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.123894 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.123927 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:49Z","lastTransitionTime":"2026-01-02T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.172500 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.172567 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.172592 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.172622 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.172649 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:49Z","lastTransitionTime":"2026-01-02T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:49 crc kubenswrapper[4766]: E0102 00:07:49.192427 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:49Z is after 
2025-08-24T17:21:41Z" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.197458 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.197494 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.197521 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.197538 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.197547 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:49Z","lastTransitionTime":"2026-01-02T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:49 crc kubenswrapper[4766]: E0102 00:07:49.218254 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:49Z is after 
2025-08-24T17:21:41Z" Jan 02 00:07:49 crc kubenswrapper[4766]: E0102 00:07:49.302083 4766 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.303679 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.303897 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.303917 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.303940 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.303956 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:49Z","lastTransitionTime":"2026-01-02T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.407261 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.407293 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.407306 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.407321 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.407334 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:49Z","lastTransitionTime":"2026-01-02T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.510555 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.510612 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.510631 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.510655 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.510672 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:49Z","lastTransitionTime":"2026-01-02T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.614771 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.614866 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.614886 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.614917 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.614937 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:49Z","lastTransitionTime":"2026-01-02T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.717632 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.717677 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.717688 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.717703 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.717715 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:49Z","lastTransitionTime":"2026-01-02T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.820736 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.820794 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.820805 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.820817 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.820826 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:49Z","lastTransitionTime":"2026-01-02T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.923940 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.924003 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.924015 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.924034 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:49 crc kubenswrapper[4766]: I0102 00:07:49.924046 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:49Z","lastTransitionTime":"2026-01-02T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.030382 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.030421 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.030430 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.030464 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.030474 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:50Z","lastTransitionTime":"2026-01-02T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.133406 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.133461 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.133480 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.133501 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.133514 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:50Z","lastTransitionTime":"2026-01-02T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.236223 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.236280 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.236298 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.236320 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.236336 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:50Z","lastTransitionTime":"2026-01-02T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.338737 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.338895 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.338916 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.338948 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.338968 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:50Z","lastTransitionTime":"2026-01-02T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.441513 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.441573 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.441588 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.441607 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.441622 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:50Z","lastTransitionTime":"2026-01-02T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.533524 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.533578 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.533606 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.533606 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:50 crc kubenswrapper[4766]: E0102 00:07:50.533737 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:50 crc kubenswrapper[4766]: E0102 00:07:50.533982 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:07:50 crc kubenswrapper[4766]: E0102 00:07:50.534123 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:50 crc kubenswrapper[4766]: E0102 00:07:50.534235 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.545050 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.545116 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.545143 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.545172 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.545192 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:50Z","lastTransitionTime":"2026-01-02T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.647752 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.647795 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.647808 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.647866 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.647883 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:50Z","lastTransitionTime":"2026-01-02T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.750814 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.750917 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.750935 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.750964 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.750990 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:50Z","lastTransitionTime":"2026-01-02T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.853644 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.853694 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.853706 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.853725 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.853738 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:50Z","lastTransitionTime":"2026-01-02T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.956514 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.956571 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.956591 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.956615 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:50 crc kubenswrapper[4766]: I0102 00:07:50.956632 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:50Z","lastTransitionTime":"2026-01-02T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.059408 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.059488 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.059499 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.059523 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.059545 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:51Z","lastTransitionTime":"2026-01-02T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.162393 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.162455 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.162477 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.162517 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.162565 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:51Z","lastTransitionTime":"2026-01-02T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.265765 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.265868 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.265885 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.265912 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.265929 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:51Z","lastTransitionTime":"2026-01-02T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.369407 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.369479 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.369499 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.369527 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.369548 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:51Z","lastTransitionTime":"2026-01-02T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.472911 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.473019 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.473048 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.473153 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.473177 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:51Z","lastTransitionTime":"2026-01-02T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.576084 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.576153 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.576174 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.576205 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.576232 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:51Z","lastTransitionTime":"2026-01-02T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.679373 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.679427 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.679445 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.679501 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.679518 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:51Z","lastTransitionTime":"2026-01-02T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.782477 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.782587 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.782621 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.782659 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.782687 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:51Z","lastTransitionTime":"2026-01-02T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.886103 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.886181 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.886196 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.886220 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.886235 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:51Z","lastTransitionTime":"2026-01-02T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.989353 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.989449 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.989471 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.989496 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:51 crc kubenswrapper[4766]: I0102 00:07:51.989513 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:51Z","lastTransitionTime":"2026-01-02T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.093931 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.093998 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.094017 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.094042 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.094066 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:52Z","lastTransitionTime":"2026-01-02T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.197700 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.197768 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.197787 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.197813 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.197832 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:52Z","lastTransitionTime":"2026-01-02T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.300324 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.300427 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.300443 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.300469 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.300487 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:52Z","lastTransitionTime":"2026-01-02T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.403430 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.403478 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.403493 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.403515 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.403533 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:52Z","lastTransitionTime":"2026-01-02T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.507068 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.507128 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.507150 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.507179 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.507200 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:52Z","lastTransitionTime":"2026-01-02T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.533757 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.533833 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.533800 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.533944 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:52 crc kubenswrapper[4766]: E0102 00:07:52.534006 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:52 crc kubenswrapper[4766]: E0102 00:07:52.534107 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:52 crc kubenswrapper[4766]: E0102 00:07:52.534232 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:07:52 crc kubenswrapper[4766]: E0102 00:07:52.534351 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.609660 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.610046 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.610194 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.610357 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.610480 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:52Z","lastTransitionTime":"2026-01-02T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.714348 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.714402 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.714418 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.714442 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.714458 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:52Z","lastTransitionTime":"2026-01-02T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.821742 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.821810 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.821828 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.821900 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.821938 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:52Z","lastTransitionTime":"2026-01-02T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.925032 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.925566 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.925692 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.925906 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:52 crc kubenswrapper[4766]: I0102 00:07:52.926076 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:52Z","lastTransitionTime":"2026-01-02T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.030007 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.030085 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.030108 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.030140 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.030163 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:53Z","lastTransitionTime":"2026-01-02T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.133714 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.133769 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.133787 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.133809 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.133829 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:53Z","lastTransitionTime":"2026-01-02T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.237073 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.237151 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.237172 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.237228 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.237251 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:53Z","lastTransitionTime":"2026-01-02T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.340240 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.340314 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.340333 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.340360 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.340376 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:53Z","lastTransitionTime":"2026-01-02T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.443192 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.443240 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.443274 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.443292 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.443303 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:53Z","lastTransitionTime":"2026-01-02T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.546025 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.546092 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.546112 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.546134 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.546153 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:53Z","lastTransitionTime":"2026-01-02T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.649510 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.649584 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.649606 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.649634 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.649656 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:53Z","lastTransitionTime":"2026-01-02T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.752435 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.752518 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.752543 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.752576 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.752598 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:53Z","lastTransitionTime":"2026-01-02T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.855455 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.855537 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.855560 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.855587 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.855606 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:53Z","lastTransitionTime":"2026-01-02T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.957712 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.957773 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.957791 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.957819 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:53 crc kubenswrapper[4766]: I0102 00:07:53.957884 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:53Z","lastTransitionTime":"2026-01-02T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.061454 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.061519 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.061536 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.061560 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.061579 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:54Z","lastTransitionTime":"2026-01-02T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.165372 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.165428 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.165439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.165458 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.165470 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:54Z","lastTransitionTime":"2026-01-02T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.268010 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.268066 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.268077 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.268095 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.268108 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:54Z","lastTransitionTime":"2026-01-02T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.371839 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.371937 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.371956 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.371983 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.372012 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:54Z","lastTransitionTime":"2026-01-02T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.475305 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.475371 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.475388 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.475415 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.475433 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:54Z","lastTransitionTime":"2026-01-02T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.534148 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.534171 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.534263 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.534316 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:54 crc kubenswrapper[4766]: E0102 00:07:54.534517 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:54 crc kubenswrapper[4766]: E0102 00:07:54.534619 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:54 crc kubenswrapper[4766]: E0102 00:07:54.534739 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:07:54 crc kubenswrapper[4766]: E0102 00:07:54.534820 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.578927 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.579011 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.579035 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.579066 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.579088 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:54Z","lastTransitionTime":"2026-01-02T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.682764 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.682919 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.682947 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.683017 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.683044 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:54Z","lastTransitionTime":"2026-01-02T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.786413 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.786500 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.786537 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.786567 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.786591 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:54Z","lastTransitionTime":"2026-01-02T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.878600 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.893948 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.895166 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.895272 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.895303 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.895331 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.895355 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:54Z","lastTransitionTime":"2026-01-02T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.899411 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:54Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.915578 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:54Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.942815 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689
899568a2201b019960c4bd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"0:07:38.152963 6176 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI0102 00:07:38.152969 6176 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0102 00:07:38.152972 6176 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0102 00:07:38.153001 6176 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0102 00:07:38.153040 6176 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:54Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.966311 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:54Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.982698 4766 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:54Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.997970 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.998036 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.998050 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.998071 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:54 crc kubenswrapper[4766]: I0102 00:07:54.998105 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:54Z","lastTransitionTime":"2026-01-02T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.000308 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:54Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.014462 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.036659 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.054902 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.070350 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.081305 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.093295 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.103892 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.104254 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.104589 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.107008 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.107037 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.107049 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:55Z","lastTransitionTime":"2026-01-02T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.115482 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.129278 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.139440 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.210339 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.210388 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.210404 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.210426 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.210443 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:55Z","lastTransitionTime":"2026-01-02T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.313832 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.313890 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.313900 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.313920 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.313932 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:55Z","lastTransitionTime":"2026-01-02T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.417212 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.417251 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.417260 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.417276 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.417284 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:55Z","lastTransitionTime":"2026-01-02T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.518997 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.519030 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.519038 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.519052 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.519061 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:55Z","lastTransitionTime":"2026-01-02T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.534465 4766 scope.go:117] "RemoveContainer" containerID="c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.545428 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.560154 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.576942 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.589290 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.651496 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.653494 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:55 
crc kubenswrapper[4766]: I0102 00:07:55.653555 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.653579 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.653611 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.653635 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:55Z","lastTransitionTime":"2026-01-02T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.668481 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\"
,\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.681602 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.697417 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.710367 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.726472 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.749445 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689
899568a2201b019960c4bd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"0:07:38.152963 6176 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI0102 00:07:38.152969 6176 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0102 00:07:38.152972 6176 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0102 00:07:38.153001 6176 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0102 00:07:38.153040 6176 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.756754 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.756773 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.756782 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.756797 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.756806 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:55Z","lastTransitionTime":"2026-01-02T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.762111 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.772362 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.788511 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.799239 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.809868 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.822768 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:55Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.859439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.859498 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.859515 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.859541 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.859559 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:55Z","lastTransitionTime":"2026-01-02T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.963151 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.963231 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.963243 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.963287 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.963301 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:55Z","lastTransitionTime":"2026-01-02T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.982206 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/1.log" Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.993922 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad"} Jan 02 00:07:55 crc kubenswrapper[4766]: I0102 00:07:55.995286 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.016860 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.037251 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.066650 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.066692 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.066701 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.066718 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.066730 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:56Z","lastTransitionTime":"2026-01-02T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.066793 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"0:07:38.152963 6176 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI0102 00:07:38.152969 6176 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0102 00:07:38.152972 6176 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0102 00:07:38.153001 6176 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0102 00:07:38.153040 6176 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.083197 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.110822 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.126651 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.144622 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.153965 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.168891 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.169728 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.169761 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.169770 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.169789 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.169803 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:56Z","lastTransitionTime":"2026-01-02T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.181150 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.200482 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.215742 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.232227 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.245458 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.262901 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.272624 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.272694 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.272710 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.272735 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.272752 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:56Z","lastTransitionTime":"2026-01-02T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.279102 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.291650 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.316282 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.316568 4766 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.316714 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs podName:f869b527-abf2-400f-90c2-ebee595564a2 nodeName:}" failed. No retries permitted until 2026-01-02 00:08:12.316684187 +0000 UTC m=+67.115171381 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs") pod "network-metrics-daemon-zd2t8" (UID: "f869b527-abf2-400f-90c2-ebee595564a2") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.375098 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.375144 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.375154 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.375167 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.375178 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:56Z","lastTransitionTime":"2026-01-02T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.417224 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.417383 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417476 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:08:28.41744729 +0000 UTC m=+83.215934444 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.417589 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.417640 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.417683 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417604 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417729 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417745 4766 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417796 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-02 00:08:28.417779059 +0000 UTC m=+83.216266223 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417809 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417829 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417861 4766 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417898 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-02 00:08:28.417888762 +0000 UTC m=+83.216376006 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417954 4766 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417986 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:08:28.417976255 +0000 UTC m=+83.216463499 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.417699 4766 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.418023 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:08:28.418014176 +0000 UTC m=+83.216501440 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.476860 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.477285 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.477297 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.477314 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.477322 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:56Z","lastTransitionTime":"2026-01-02T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.533602 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.533668 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.533666 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.533636 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.533766 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.533942 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.534004 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:56 crc kubenswrapper[4766]: E0102 00:07:56.534073 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.579422 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.579468 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.579481 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.579499 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.579512 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:56Z","lastTransitionTime":"2026-01-02T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.681877 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.681906 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.681914 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.681926 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.681935 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:56Z","lastTransitionTime":"2026-01-02T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.784883 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.784933 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.784943 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.784957 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.784967 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:56Z","lastTransitionTime":"2026-01-02T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.887730 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.887805 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.887826 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.887893 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.887919 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:56Z","lastTransitionTime":"2026-01-02T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.989795 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.989868 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.989886 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.989908 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:56 crc kubenswrapper[4766]: I0102 00:07:56.989928 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:56Z","lastTransitionTime":"2026-01-02T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.000080 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/2.log" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.001180 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/1.log" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.005744 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad" exitCode=1 Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.005809 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad"} Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.005884 4766 scope.go:117] "RemoveContainer" containerID="c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.007041 4766 scope.go:117] "RemoveContainer" containerID="eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad" Jan 02 00:07:57 crc kubenswrapper[4766]: E0102 00:07:57.007368 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.035609 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.048903 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.067617 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.085003 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.092779 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.092876 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.092902 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.092933 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.092955 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:57Z","lastTransitionTime":"2026-01-02T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.099747 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.112048 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.125060 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.143532 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.160249 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.183689 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954
f4e05ee2e9be5e1ab69d78ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3cfd7070622f70610eac6c82cd27272776d0689899568a2201b019960c4bd1e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:38Z\\\",\\\"message\\\":\\\"0:07:38.152963 6176 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI0102 00:07:38.152969 6176 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0102 00:07:38.152972 6176 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0102 00:07:38.153001 6176 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0102 00:07:38.153040 6176 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:56Z\\\",\\\"message\\\":\\\"vices_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nI0102 00:07:56.529116 6383 lb_config.go:1031] Cluster endpoints for openshift-machine-config-operator/machine-config-operator for network=default are: map[]\\\\nI0102 00:07:56.529087 6383 services_controller.go:434] Service openshift-marketplace/certified-operators retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{certified-operators openshift-marketplace db855201-4874-4bfa-8608-def310eb5a2a 5471 0 2025-02-23 05:23:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:7FOCZ3GVMQ1pwQKJahWmE09uJDRx6ab8xxcEYE] map[] [{operators.coreos.com/v1alpha1 CatalogSource certified-operators faf75094-01cc-4ebf-8761-1361fa2bf31e 0xc00779338d 0xc00779338e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: certified-operators,olm.managed: 
true,},ClusterIP:10.217.5.214,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.214],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.195122 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.195185 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.195205 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.195226 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.195240 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:57Z","lastTransitionTime":"2026-01-02T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.199154 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.211815 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.222616 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.232776 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.246580 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.257919 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.275274 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:57Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.299584 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.299617 4766 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.299634 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.299651 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.299665 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:57Z","lastTransitionTime":"2026-01-02T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.404887 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.404950 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.404968 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.404992 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.405009 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:57Z","lastTransitionTime":"2026-01-02T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.508358 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.508418 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.508435 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.508457 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.508474 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:57Z","lastTransitionTime":"2026-01-02T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.613431 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.613558 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.613583 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.613616 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.613640 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:57Z","lastTransitionTime":"2026-01-02T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.716356 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.716404 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.716418 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.716435 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.716448 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:57Z","lastTransitionTime":"2026-01-02T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.819676 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.819745 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.819767 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.819792 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.819812 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:57Z","lastTransitionTime":"2026-01-02T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.923210 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.923279 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.923296 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.923320 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:57 crc kubenswrapper[4766]: I0102 00:07:57.923338 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:57Z","lastTransitionTime":"2026-01-02T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.012049 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/2.log" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.020064 4766 scope.go:117] "RemoveContainer" containerID="eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad" Jan 02 00:07:58 crc kubenswrapper[4766]: E0102 00:07:58.020339 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.026455 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.026504 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.026542 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.026561 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.026572 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:58Z","lastTransitionTime":"2026-01-02T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.042784 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.064417 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.083627 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.106796 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.121322 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.130003 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.130052 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.130066 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.130084 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.130098 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:58Z","lastTransitionTime":"2026-01-02T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.133812 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.146648 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.160413 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.177955 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954
f4e05ee2e9be5e1ab69d78ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:56Z\\\",\\\"message\\\":\\\"vices_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nI0102 00:07:56.529116 6383 lb_config.go:1031] Cluster endpoints for openshift-machine-config-operator/machine-config-operator for network=default are: map[]\\\\nI0102 00:07:56.529087 6383 services_controller.go:434] Service openshift-marketplace/certified-operators retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{certified-operators openshift-marketplace db855201-4874-4bfa-8608-def310eb5a2a 5471 0 2025-02-23 05:23:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:7FOCZ3GVMQ1pwQKJahWmE09uJDRx6ab8xxcEYE] map[] [{operators.coreos.com/v1alpha1 CatalogSource certified-operators faf75094-01cc-4ebf-8761-1361fa2bf31e 0xc00779338d 0xc00779338e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: certified-operators,olm.managed: true,},ClusterIP:10.217.5.214,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.214],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.203190 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.217933 4766 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.232463 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.232531 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.232548 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.232573 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.232590 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:58Z","lastTransitionTime":"2026-01-02T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.237674 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.254211 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.266638 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.282185 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.297660 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.319426 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:58Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.335597 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.335670 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.335688 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.335713 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.335774 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:58Z","lastTransitionTime":"2026-01-02T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.438986 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.439036 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.439047 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.439068 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.439080 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:58Z","lastTransitionTime":"2026-01-02T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.533774 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.533946 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.533734 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.533807 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:07:58 crc kubenswrapper[4766]: E0102 00:07:58.534059 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:07:58 crc kubenswrapper[4766]: E0102 00:07:58.534245 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:07:58 crc kubenswrapper[4766]: E0102 00:07:58.534290 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:07:58 crc kubenswrapper[4766]: E0102 00:07:58.534376 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.542332 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.542391 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.542409 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.542434 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.542453 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:58Z","lastTransitionTime":"2026-01-02T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.644945 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.645016 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.645034 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.645057 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.645076 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:58Z","lastTransitionTime":"2026-01-02T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.749363 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.749413 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.749430 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.749452 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.749470 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:58Z","lastTransitionTime":"2026-01-02T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.852838 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.852951 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.852970 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.852996 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.853013 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:58Z","lastTransitionTime":"2026-01-02T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.955827 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.955948 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.955972 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.956000 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:58 crc kubenswrapper[4766]: I0102 00:07:58.956021 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:58Z","lastTransitionTime":"2026-01-02T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.059396 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.059490 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.059512 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.059546 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.059566 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.163808 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.163968 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.163989 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.164017 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.164037 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.267084 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.267133 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.267153 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.267178 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.267196 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.369741 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.369826 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.369881 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.369914 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.369941 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: E0102 00:07:59.395206 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.401424 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.401471 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.401488 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.401513 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.401535 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: E0102 00:07:59.440111 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.447124 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.447210 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.447238 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.447274 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.447299 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: E0102 00:07:59.466033 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.473385 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.473431 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.473443 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.473464 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.473476 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: E0102 00:07:59.493221 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.497333 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.497388 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.497402 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.497425 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.497439 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: E0102 00:07:59.513309 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 02 00:07:59 crc kubenswrapper[4766]: E0102 00:07:59.513440 4766 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.515320 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.515374 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.515391 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.515412 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.515428 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.618028 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.618069 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.618079 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.618094 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.618104 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.720493 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.720543 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.720554 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.720575 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.720588 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.824043 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.824113 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.824130 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.824164 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.824182 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.927388 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.927452 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.927470 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.927502 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:07:59 crc kubenswrapper[4766]: I0102 00:07:59.927551 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:07:59Z","lastTransitionTime":"2026-01-02T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.030026 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.030106 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.030134 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.030161 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.030182 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:00Z","lastTransitionTime":"2026-01-02T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.133087 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.133174 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.133206 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.133235 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.133256 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:00Z","lastTransitionTime":"2026-01-02T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.236464 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.236544 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.236567 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.236592 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.236609 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:00Z","lastTransitionTime":"2026-01-02T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.340398 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.340472 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.341414 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.341473 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.341492 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:00Z","lastTransitionTime":"2026-01-02T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.445114 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.445185 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.445203 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.445226 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.445243 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:00Z","lastTransitionTime":"2026-01-02T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.533606 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.533725 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:00 crc kubenswrapper[4766]: E0102 00:08:00.533894 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.533948 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:00 crc kubenswrapper[4766]: E0102 00:08:00.534162 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.534273 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:00 crc kubenswrapper[4766]: E0102 00:08:00.534519 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:00 crc kubenswrapper[4766]: E0102 00:08:00.534722 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.551459 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.551562 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.551585 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.551615 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.551648 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:00Z","lastTransitionTime":"2026-01-02T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.654310 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.654386 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.654405 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.654446 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.654467 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:00Z","lastTransitionTime":"2026-01-02T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.758598 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.758661 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.758673 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.758698 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.758713 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:00Z","lastTransitionTime":"2026-01-02T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.862142 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.862227 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.862249 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.862278 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.862300 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:00Z","lastTransitionTime":"2026-01-02T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.965905 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.965995 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.966021 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.966051 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:00 crc kubenswrapper[4766]: I0102 00:08:00.966074 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:00Z","lastTransitionTime":"2026-01-02T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.070019 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.070078 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.070088 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.070111 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.070122 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:01Z","lastTransitionTime":"2026-01-02T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.172735 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.172807 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.172824 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.172876 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.172895 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:01Z","lastTransitionTime":"2026-01-02T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.275940 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.275997 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.276013 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.276035 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.276049 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:01Z","lastTransitionTime":"2026-01-02T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.379621 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.379685 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.379697 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.379719 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.379735 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:01Z","lastTransitionTime":"2026-01-02T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.483913 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.483979 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.484001 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.484035 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.484059 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:01Z","lastTransitionTime":"2026-01-02T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.587057 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.587110 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.587127 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.587151 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.587169 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:01Z","lastTransitionTime":"2026-01-02T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.689769 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.689835 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.689907 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.689943 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.689970 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:01Z","lastTransitionTime":"2026-01-02T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.793189 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.793248 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.793259 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.793276 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.793288 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:01Z","lastTransitionTime":"2026-01-02T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.896690 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.896758 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.896781 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.896811 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:01 crc kubenswrapper[4766]: I0102 00:08:01.896829 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:01Z","lastTransitionTime":"2026-01-02T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.000415 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.000489 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.000509 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.000535 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.000554 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:02Z","lastTransitionTime":"2026-01-02T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.104495 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.104695 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.104786 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.104825 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.104891 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:02Z","lastTransitionTime":"2026-01-02T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.207904 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.207985 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.208005 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.208042 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.208062 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:02Z","lastTransitionTime":"2026-01-02T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.311729 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.311789 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.311801 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.311820 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.311832 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:02Z","lastTransitionTime":"2026-01-02T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.414755 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.414824 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.414884 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.414915 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.414934 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:02Z","lastTransitionTime":"2026-01-02T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.517262 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.517334 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.517351 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.517378 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.517396 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:02Z","lastTransitionTime":"2026-01-02T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.533313 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.533354 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.533320 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:02 crc kubenswrapper[4766]: E0102 00:08:02.533501 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.533320 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:02 crc kubenswrapper[4766]: E0102 00:08:02.533755 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:02 crc kubenswrapper[4766]: E0102 00:08:02.533925 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:02 crc kubenswrapper[4766]: E0102 00:08:02.534092 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.621164 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.621232 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.621252 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.621279 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.621297 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:02Z","lastTransitionTime":"2026-01-02T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.724213 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.724281 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.724303 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.724331 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.724355 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:02Z","lastTransitionTime":"2026-01-02T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.828608 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.828723 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.828745 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.828771 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.828793 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:02Z","lastTransitionTime":"2026-01-02T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.933185 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.933271 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.933292 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.933332 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:02 crc kubenswrapper[4766]: I0102 00:08:02.933356 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:02Z","lastTransitionTime":"2026-01-02T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.036650 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.036734 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.036754 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.036785 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.036807 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:03Z","lastTransitionTime":"2026-01-02T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.140292 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.140361 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.140380 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.140407 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.140425 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:03Z","lastTransitionTime":"2026-01-02T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.242652 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.242702 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.242713 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.242731 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.242743 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:03Z","lastTransitionTime":"2026-01-02T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.345687 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.345766 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.345779 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.345804 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.345818 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:03Z","lastTransitionTime":"2026-01-02T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.449158 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.449239 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.449262 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.449305 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.449328 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:03Z","lastTransitionTime":"2026-01-02T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.551631 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.551691 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.551708 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.551730 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.551748 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:03Z","lastTransitionTime":"2026-01-02T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.654518 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.654555 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.654565 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.654579 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.654591 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:03Z","lastTransitionTime":"2026-01-02T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.757527 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.757609 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.757625 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.757649 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.757665 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:03Z","lastTransitionTime":"2026-01-02T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.860294 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.860361 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.860380 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.860406 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.860424 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:03Z","lastTransitionTime":"2026-01-02T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.963633 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.963705 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.963727 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.963755 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:03 crc kubenswrapper[4766]: I0102 00:08:03.963775 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:03Z","lastTransitionTime":"2026-01-02T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.070380 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.070471 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.070537 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.070566 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.070653 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:04Z","lastTransitionTime":"2026-01-02T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.173609 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.173673 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.173690 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.173713 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.173732 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:04Z","lastTransitionTime":"2026-01-02T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.276923 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.276978 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.277000 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.277029 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.277051 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:04Z","lastTransitionTime":"2026-01-02T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.380357 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.380422 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.380439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.380462 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.380479 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:04Z","lastTransitionTime":"2026-01-02T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.484035 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.484111 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.484128 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.484158 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.484173 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:04Z","lastTransitionTime":"2026-01-02T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.534049 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.534109 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.534780 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:04 crc kubenswrapper[4766]: E0102 00:08:04.535072 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.535093 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:04 crc kubenswrapper[4766]: E0102 00:08:04.535422 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:04 crc kubenswrapper[4766]: E0102 00:08:04.535577 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:04 crc kubenswrapper[4766]: E0102 00:08:04.535694 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.587516 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.587591 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.587618 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.587650 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.587675 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:04Z","lastTransitionTime":"2026-01-02T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.690624 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.690680 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.690696 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.690719 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.690736 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:04Z","lastTransitionTime":"2026-01-02T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.794146 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.794238 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.794460 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.794480 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.794496 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:04Z","lastTransitionTime":"2026-01-02T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.897501 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.897565 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.897586 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.897611 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:04 crc kubenswrapper[4766]: I0102 00:08:04.897627 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:04Z","lastTransitionTime":"2026-01-02T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.001238 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.001297 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.001306 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.001322 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.001332 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:05Z","lastTransitionTime":"2026-01-02T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.104746 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.104819 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.104878 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.104914 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.104942 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:05Z","lastTransitionTime":"2026-01-02T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.207733 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.207795 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.207812 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.207836 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.207893 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:05Z","lastTransitionTime":"2026-01-02T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.311307 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.311374 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.311407 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.311430 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.311448 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:05Z","lastTransitionTime":"2026-01-02T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.414831 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.414893 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.414904 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.414923 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.414935 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:05Z","lastTransitionTime":"2026-01-02T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.517526 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.517577 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.517589 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.517607 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.517622 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:05Z","lastTransitionTime":"2026-01-02T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.549971 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.562686 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.581736 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.599378 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.619584 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.619631 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.619646 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.619669 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.619685 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:05Z","lastTransitionTime":"2026-01-02T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.620834 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\"
,\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.636828 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 
00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.650436 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.696404 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.710228 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.722888 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.722922 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.722935 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.722953 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.722964 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:05Z","lastTransitionTime":"2026-01-02T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.734737 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.758001 4766 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:56Z\\\",\\\"message\\\":\\\"vices_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nI0102 00:07:56.529116 6383 lb_config.go:1031] Cluster endpoints for openshift-machine-config-operator/machine-config-operator for network=default are: map[]\\\\nI0102 00:07:56.529087 6383 services_controller.go:434] Service openshift-marketplace/certified-operators retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{certified-operators openshift-marketplace db855201-4874-4bfa-8608-def310eb5a2a 5471 0 2025-02-23 05:23:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:7FOCZ3GVMQ1pwQKJahWmE09uJDRx6ab8xxcEYE] map[] [{operators.coreos.com/v1alpha1 CatalogSource certified-operators faf75094-01cc-4ebf-8761-1361fa2bf31e 0xc00779338d 0xc00779338e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: certified-operators,olm.managed: true,},ClusterIP:10.217.5.214,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.214],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.773496 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.785882 4766 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.800879 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.813084 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.825911 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.825949 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.825960 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.825977 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.825988 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:05Z","lastTransitionTime":"2026-01-02T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.830632 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.847725 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.928703 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.928792 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.928810 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.928872 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:05 crc kubenswrapper[4766]: I0102 00:08:05.928893 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:05Z","lastTransitionTime":"2026-01-02T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.032818 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.032888 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.032901 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.032920 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.032952 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:06Z","lastTransitionTime":"2026-01-02T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.136654 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.136717 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.136730 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.136752 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.136765 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:06Z","lastTransitionTime":"2026-01-02T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.241832 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.242451 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.242468 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.242494 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.242513 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:06Z","lastTransitionTime":"2026-01-02T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.345946 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.346021 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.346040 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.346069 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.346105 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:06Z","lastTransitionTime":"2026-01-02T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.449760 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.449862 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.449878 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.449901 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.449921 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:06Z","lastTransitionTime":"2026-01-02T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.534188 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.534337 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.534385 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.534349 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:06 crc kubenswrapper[4766]: E0102 00:08:06.534582 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:06 crc kubenswrapper[4766]: E0102 00:08:06.534759 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:06 crc kubenswrapper[4766]: E0102 00:08:06.535016 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:06 crc kubenswrapper[4766]: E0102 00:08:06.535156 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.554271 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.554432 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.554465 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.554546 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.554623 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:06Z","lastTransitionTime":"2026-01-02T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.658449 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.658488 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.658503 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.658520 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.658532 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:06Z","lastTransitionTime":"2026-01-02T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.762229 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.762302 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.762316 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.762339 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.762354 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:06Z","lastTransitionTime":"2026-01-02T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.864478 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.864521 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.864533 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.864551 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.864561 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:06Z","lastTransitionTime":"2026-01-02T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.967249 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.967331 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.967347 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.967390 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:06 crc kubenswrapper[4766]: I0102 00:08:06.967402 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:06Z","lastTransitionTime":"2026-01-02T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.069625 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.069661 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.069672 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.069688 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.069703 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:07Z","lastTransitionTime":"2026-01-02T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.172355 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.172430 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.172449 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.172476 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.172497 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:07Z","lastTransitionTime":"2026-01-02T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.276194 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.276269 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.276298 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.276333 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.276357 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:07Z","lastTransitionTime":"2026-01-02T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.379132 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.379187 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.379205 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.379229 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.379254 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:07Z","lastTransitionTime":"2026-01-02T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.482336 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.482403 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.482427 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.482456 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.482479 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:07Z","lastTransitionTime":"2026-01-02T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.585442 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.585497 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.585507 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.585529 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.585542 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:07Z","lastTransitionTime":"2026-01-02T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.688924 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.688983 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.689001 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.689024 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.689041 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:07Z","lastTransitionTime":"2026-01-02T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.792379 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.792439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.792455 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.792478 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.792495 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:07Z","lastTransitionTime":"2026-01-02T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.896508 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.896805 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.896821 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.896873 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:07 crc kubenswrapper[4766]: I0102 00:08:07.896892 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:07Z","lastTransitionTime":"2026-01-02T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.000696 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.000751 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.000784 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.000808 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.000820 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:08Z","lastTransitionTime":"2026-01-02T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.104494 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.104576 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.104616 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.104648 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.104670 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:08Z","lastTransitionTime":"2026-01-02T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.207909 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.207971 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.207993 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.208022 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.208043 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:08Z","lastTransitionTime":"2026-01-02T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.312357 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.312419 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.312438 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.312466 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.312486 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:08Z","lastTransitionTime":"2026-01-02T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.415439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.415491 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.415505 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.415525 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.415537 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:08Z","lastTransitionTime":"2026-01-02T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.517697 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.517756 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.517791 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.517896 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.517922 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:08Z","lastTransitionTime":"2026-01-02T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.533737 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.533780 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:08 crc kubenswrapper[4766]: E0102 00:08:08.533955 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.533971 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:08 crc kubenswrapper[4766]: E0102 00:08:08.534106 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.534200 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:08 crc kubenswrapper[4766]: E0102 00:08:08.534294 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:08 crc kubenswrapper[4766]: E0102 00:08:08.534380 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.621334 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.621403 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.621427 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.621457 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.621499 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:08Z","lastTransitionTime":"2026-01-02T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.724589 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.724648 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.724666 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.724688 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.724704 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:08Z","lastTransitionTime":"2026-01-02T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.828106 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.828141 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.828152 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.828167 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.828178 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:08Z","lastTransitionTime":"2026-01-02T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.930612 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.930655 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.930666 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.930681 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:08 crc kubenswrapper[4766]: I0102 00:08:08.930691 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:08Z","lastTransitionTime":"2026-01-02T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.033980 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.034052 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.034072 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.034100 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.034118 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.136911 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.136972 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.136990 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.137014 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.137032 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.239528 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.239596 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.239613 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.239636 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.239653 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.341819 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.341898 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.341910 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.341924 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.341937 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.444884 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.444954 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.444979 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.445056 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.445124 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.548000 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.548043 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.548055 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.548068 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.548078 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.577388 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.577462 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.577477 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.577492 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.577518 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: E0102 00:08:09.595583 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.600803 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.600873 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.600883 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.600897 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.600908 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: E0102 00:08:09.616825 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.621449 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.621491 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.621500 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.621513 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.621523 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: E0102 00:08:09.635054 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.638629 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.638654 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.638770 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.638784 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.638794 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: E0102 00:08:09.651612 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.654928 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.654963 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.654973 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.654988 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.655016 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: E0102 00:08:09.666974 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:09 crc kubenswrapper[4766]: E0102 00:08:09.667094 4766 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.668798 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.668872 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.668896 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.668916 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.668928 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.771421 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.771464 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.771475 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.771490 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.771500 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.873710 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.873767 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.873784 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.873809 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.873887 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.976658 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.976709 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.976726 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.976752 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:09 crc kubenswrapper[4766]: I0102 00:08:09.976772 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:09Z","lastTransitionTime":"2026-01-02T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.079628 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.079673 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.079687 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.079703 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.079715 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:10Z","lastTransitionTime":"2026-01-02T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.182649 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.182688 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.182696 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.182711 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.182721 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:10Z","lastTransitionTime":"2026-01-02T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.285886 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.285922 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.285931 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.285945 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.285956 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:10Z","lastTransitionTime":"2026-01-02T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.387649 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.387697 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.387710 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.387728 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.387739 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:10Z","lastTransitionTime":"2026-01-02T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.490009 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.490062 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.490075 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.490093 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.490103 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:10Z","lastTransitionTime":"2026-01-02T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.533880 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.533945 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.533951 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.533951 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:10 crc kubenswrapper[4766]: E0102 00:08:10.534084 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:10 crc kubenswrapper[4766]: E0102 00:08:10.534259 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:10 crc kubenswrapper[4766]: E0102 00:08:10.534418 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:10 crc kubenswrapper[4766]: E0102 00:08:10.534504 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.592828 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.592927 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.592936 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.592952 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.592980 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:10Z","lastTransitionTime":"2026-01-02T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.695678 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.695719 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.695728 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.695742 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.695751 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:10Z","lastTransitionTime":"2026-01-02T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.797962 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.798035 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.798059 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.798087 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.798104 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:10Z","lastTransitionTime":"2026-01-02T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.901059 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.901102 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.901110 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.901127 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:10 crc kubenswrapper[4766]: I0102 00:08:10.901136 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:10Z","lastTransitionTime":"2026-01-02T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.002977 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.003025 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.003035 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.003052 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.003062 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:11Z","lastTransitionTime":"2026-01-02T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.104967 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.105027 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.105043 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.105065 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.105082 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:11Z","lastTransitionTime":"2026-01-02T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.207156 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.207212 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.207229 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.207251 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.207269 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:11Z","lastTransitionTime":"2026-01-02T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.309225 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.309268 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.309283 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.309299 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.309310 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:11Z","lastTransitionTime":"2026-01-02T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.411815 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.411885 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.411897 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.411917 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.411957 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:11Z","lastTransitionTime":"2026-01-02T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.513648 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.513718 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.513735 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.513763 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.513783 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:11Z","lastTransitionTime":"2026-01-02T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.615631 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.615667 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.615677 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.615690 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.615701 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:11Z","lastTransitionTime":"2026-01-02T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.718821 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.718907 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.718925 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.718950 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.718968 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:11Z","lastTransitionTime":"2026-01-02T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.821396 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.821463 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.821480 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.821504 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.821522 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:11Z","lastTransitionTime":"2026-01-02T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.924092 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.924135 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.924146 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.924165 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:11 crc kubenswrapper[4766]: I0102 00:08:11.924178 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:11Z","lastTransitionTime":"2026-01-02T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.027099 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.027178 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.027200 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.027230 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.027253 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:12Z","lastTransitionTime":"2026-01-02T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.130629 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.130665 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.130676 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.130699 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.130708 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:12Z","lastTransitionTime":"2026-01-02T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.233451 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.233499 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.233516 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.233539 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.233556 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:12Z","lastTransitionTime":"2026-01-02T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.336371 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.336417 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.336428 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.336443 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.336453 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:12Z","lastTransitionTime":"2026-01-02T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.391742 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:12 crc kubenswrapper[4766]: E0102 00:08:12.391950 4766 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:08:12 crc kubenswrapper[4766]: E0102 00:08:12.392050 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs podName:f869b527-abf2-400f-90c2-ebee595564a2 nodeName:}" failed. No retries permitted until 2026-01-02 00:08:44.392029814 +0000 UTC m=+99.190517048 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs") pod "network-metrics-daemon-zd2t8" (UID: "f869b527-abf2-400f-90c2-ebee595564a2") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.438835 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.438949 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.438966 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.438989 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.439008 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:12Z","lastTransitionTime":"2026-01-02T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.534030 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.534141 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.534218 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:12 crc kubenswrapper[4766]: E0102 00:08:12.534220 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:12 crc kubenswrapper[4766]: E0102 00:08:12.534350 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.534439 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:12 crc kubenswrapper[4766]: E0102 00:08:12.534645 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:12 crc kubenswrapper[4766]: E0102 00:08:12.534559 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.541148 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.541190 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.541205 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.541238 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.541254 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:12Z","lastTransitionTime":"2026-01-02T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.643185 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.643233 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.643241 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.643255 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.643264 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:12Z","lastTransitionTime":"2026-01-02T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.745810 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.745879 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.745888 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.745901 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.745910 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:12Z","lastTransitionTime":"2026-01-02T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.848079 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.848115 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.848123 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.848137 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.848146 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:12Z","lastTransitionTime":"2026-01-02T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.950909 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.950955 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.950967 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.950983 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:12 crc kubenswrapper[4766]: I0102 00:08:12.950995 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:12Z","lastTransitionTime":"2026-01-02T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.053517 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.053572 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.053583 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.053602 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.053614 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:13Z","lastTransitionTime":"2026-01-02T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.156483 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.156531 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.156546 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.156565 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.156581 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:13Z","lastTransitionTime":"2026-01-02T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.259516 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.259565 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.259577 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.259592 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.259601 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:13Z","lastTransitionTime":"2026-01-02T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.362324 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.362368 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.362377 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.362393 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.362404 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:13Z","lastTransitionTime":"2026-01-02T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.465029 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.465091 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.465111 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.465136 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.465153 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:13Z","lastTransitionTime":"2026-01-02T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.535488 4766 scope.go:117] "RemoveContainer" containerID="eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad" Jan 02 00:08:13 crc kubenswrapper[4766]: E0102 00:08:13.535952 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.568528 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.568578 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.568587 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.568603 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.568613 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:13Z","lastTransitionTime":"2026-01-02T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.671500 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.671570 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.671594 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.671624 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.671648 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:13Z","lastTransitionTime":"2026-01-02T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.774278 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.774367 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.774393 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.774491 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.774523 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:13Z","lastTransitionTime":"2026-01-02T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.876574 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.876623 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.876639 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.876658 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.876673 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:13Z","lastTransitionTime":"2026-01-02T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.978584 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.978623 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.978633 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.978645 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:13 crc kubenswrapper[4766]: I0102 00:08:13.978653 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:13Z","lastTransitionTime":"2026-01-02T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.078602 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lmc2p_adc58583-1116-4066-9ca8-da659b367839/kube-multus/0.log" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.078723 4766 generic.go:334] "Generic (PLEG): container finished" podID="adc58583-1116-4066-9ca8-da659b367839" containerID="f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d" exitCode=1 Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.078782 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lmc2p" event={"ID":"adc58583-1116-4066-9ca8-da659b367839","Type":"ContainerDied","Data":"f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d"} Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.079518 4766 scope.go:117] "RemoveContainer" containerID="f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.081920 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.081964 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.081975 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.081990 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.082001 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:14Z","lastTransitionTime":"2026-01-02T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.096238 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.115404 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:14Z\\\",\\\"message\\\":\\\"containers with unready 
status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-02T00:07:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c\\\\n2026-01-02T00:07:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c to /host/opt/cni/bin/\\\\n2026-01-02T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-02T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-02T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for 
pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.127129 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\
\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.138686 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.150755 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{
\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.164222 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.184768 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.184793 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.184800 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.184814 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.184822 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:14Z","lastTransitionTime":"2026-01-02T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.185522 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.208973 4766 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:56Z\\\",\\\"message\\\":\\\"vices_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nI0102 00:07:56.529116 6383 lb_config.go:1031] Cluster endpoints for openshift-machine-config-operator/machine-config-operator for network=default are: map[]\\\\nI0102 00:07:56.529087 6383 services_controller.go:434] Service openshift-marketplace/certified-operators retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{certified-operators openshift-marketplace db855201-4874-4bfa-8608-def310eb5a2a 5471 0 2025-02-23 05:23:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:7FOCZ3GVMQ1pwQKJahWmE09uJDRx6ab8xxcEYE] map[] [{operators.coreos.com/v1alpha1 CatalogSource certified-operators faf75094-01cc-4ebf-8761-1361fa2bf31e 0xc00779338d 0xc00779338e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: certified-operators,olm.managed: true,},ClusterIP:10.217.5.214,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.214],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.223880 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.237322 4766 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.250547 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.260316 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.274063 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.287063 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.287099 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.287108 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.287122 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.287132 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:14Z","lastTransitionTime":"2026-01-02T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.287599 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.299947 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.312012 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.321537 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.389176 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.389206 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.389214 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.389227 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.389235 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:14Z","lastTransitionTime":"2026-01-02T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.491526 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.491693 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.491758 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.491832 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.491920 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:14Z","lastTransitionTime":"2026-01-02T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.533761 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.533788 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:14 crc kubenswrapper[4766]: E0102 00:08:14.533926 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.533867 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.533799 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:14 crc kubenswrapper[4766]: E0102 00:08:14.534120 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:14 crc kubenswrapper[4766]: E0102 00:08:14.534239 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:14 crc kubenswrapper[4766]: E0102 00:08:14.534107 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.594575 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.595334 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.595398 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.595468 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.595526 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:14Z","lastTransitionTime":"2026-01-02T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.698222 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.698273 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.698286 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.698303 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.698317 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:14Z","lastTransitionTime":"2026-01-02T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.799791 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.799823 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.799831 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.799862 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.799872 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:14Z","lastTransitionTime":"2026-01-02T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.903302 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.903555 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.903636 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.903746 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:14 crc kubenswrapper[4766]: I0102 00:08:14.903777 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:14Z","lastTransitionTime":"2026-01-02T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.005897 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.005925 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.005933 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.005944 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.005953 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:15Z","lastTransitionTime":"2026-01-02T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.085947 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lmc2p_adc58583-1116-4066-9ca8-da659b367839/kube-multus/0.log" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.086044 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lmc2p" event={"ID":"adc58583-1116-4066-9ca8-da659b367839","Type":"ContainerStarted","Data":"76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea"} Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.103753 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.108628 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.108688 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.108706 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.108731 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.108749 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:15Z","lastTransitionTime":"2026-01-02T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.113225 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"ho
stIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.124302 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.135587 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.146260 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.158031 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.168808 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.181484 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-02T00:07:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c\\\\n2026-01-02T00:07:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c to /host/opt/cni/bin/\\\\n2026-01-02T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-02T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-02T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.194360 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.208821 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.211338 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.211370 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.211379 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.211394 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.211404 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:15Z","lastTransitionTime":"2026-01-02T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.227063 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.247493 4766 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:56Z\\\",\\\"message\\\":\\\"vices_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nI0102 00:07:56.529116 6383 lb_config.go:1031] Cluster endpoints for openshift-machine-config-operator/machine-config-operator for network=default are: map[]\\\\nI0102 00:07:56.529087 6383 services_controller.go:434] Service openshift-marketplace/certified-operators retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{certified-operators openshift-marketplace db855201-4874-4bfa-8608-def310eb5a2a 5471 0 2025-02-23 05:23:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:7FOCZ3GVMQ1pwQKJahWmE09uJDRx6ab8xxcEYE] map[] [{operators.coreos.com/v1alpha1 CatalogSource certified-operators faf75094-01cc-4ebf-8761-1361fa2bf31e 0xc00779338d 0xc00779338e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: certified-operators,olm.managed: true,},ClusterIP:10.217.5.214,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.214],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.261211 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.275330 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.289438 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.301381 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.314180 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.315028 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.315117 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.315142 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.315175 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.315199 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:15Z","lastTransitionTime":"2026-01-02T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.418183 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.418244 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.418261 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.418287 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.418303 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:15Z","lastTransitionTime":"2026-01-02T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.521652 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.521744 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.521766 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.521793 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.521873 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:15Z","lastTransitionTime":"2026-01-02T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.548754 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.561636 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.576734 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.595213 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.607569 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.623977 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:15 
crc kubenswrapper[4766]: I0102 00:08:15.624062 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.624138 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.624249 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.624589 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:15Z","lastTransitionTime":"2026-01-02T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.626971 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-02T00:07:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c\\\\n2026-01-02T00:07:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c to /host/opt/cni/bin/\\\\n2026-01-02T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-02T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-02T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.645283 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 
00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.656546 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.674783 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.686959 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.710810 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.727284 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.727424 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:15 crc 
kubenswrapper[4766]: I0102 00:08:15.727506 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.727597 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.727672 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:15Z","lastTransitionTime":"2026-01-02T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.740944 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954
f4e05ee2e9be5e1ab69d78ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:56Z\\\",\\\"message\\\":\\\"vices_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nI0102 00:07:56.529116 6383 lb_config.go:1031] Cluster endpoints for openshift-machine-config-operator/machine-config-operator for network=default are: map[]\\\\nI0102 00:07:56.529087 6383 services_controller.go:434] Service openshift-marketplace/certified-operators retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{certified-operators openshift-marketplace db855201-4874-4bfa-8608-def310eb5a2a 5471 0 2025-02-23 05:23:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:7FOCZ3GVMQ1pwQKJahWmE09uJDRx6ab8xxcEYE] map[] [{operators.coreos.com/v1alpha1 CatalogSource certified-operators faf75094-01cc-4ebf-8761-1361fa2bf31e 0xc00779338d 0xc00779338e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: certified-operators,olm.managed: true,},ClusterIP:10.217.5.214,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.214],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.759074 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.776859 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.793506 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.809301 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.819512 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:15Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.830263 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.830300 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.830311 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.830328 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.830341 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:15Z","lastTransitionTime":"2026-01-02T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.931904 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.931936 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.931944 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.931957 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:15 crc kubenswrapper[4766]: I0102 00:08:15.931965 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:15Z","lastTransitionTime":"2026-01-02T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.034197 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.034266 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.034286 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.034311 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.034329 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:16Z","lastTransitionTime":"2026-01-02T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.137237 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.137290 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.137302 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.137323 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.137337 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:16Z","lastTransitionTime":"2026-01-02T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.239684 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.239724 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.239736 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.239750 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.239760 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:16Z","lastTransitionTime":"2026-01-02T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.341621 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.341656 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.341665 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.341677 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.341686 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:16Z","lastTransitionTime":"2026-01-02T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.444304 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.444704 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.444800 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.444931 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.445022 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:16Z","lastTransitionTime":"2026-01-02T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.533758 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.533867 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.533773 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:16 crc kubenswrapper[4766]: E0102 00:08:16.534054 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:16 crc kubenswrapper[4766]: E0102 00:08:16.534196 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.534259 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:16 crc kubenswrapper[4766]: E0102 00:08:16.534375 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:16 crc kubenswrapper[4766]: E0102 00:08:16.534596 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.547705 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.547750 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.547769 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.547793 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.547811 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:16Z","lastTransitionTime":"2026-01-02T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.650399 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.650423 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.650452 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.650466 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.650476 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:16Z","lastTransitionTime":"2026-01-02T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.752705 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.752747 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.752758 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.752774 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.752786 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:16Z","lastTransitionTime":"2026-01-02T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.855494 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.855532 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.855543 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.855558 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.855568 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:16Z","lastTransitionTime":"2026-01-02T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.957581 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.957632 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.957645 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.957663 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:16 crc kubenswrapper[4766]: I0102 00:08:16.957676 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:16Z","lastTransitionTime":"2026-01-02T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.059964 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.060033 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.060058 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.060089 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.060110 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:17Z","lastTransitionTime":"2026-01-02T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.163268 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.163325 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.163343 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.163368 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.163383 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:17Z","lastTransitionTime":"2026-01-02T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.265835 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.265912 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.265925 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.265940 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.265950 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:17Z","lastTransitionTime":"2026-01-02T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.368920 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.368946 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.368974 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.368986 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.368996 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:17Z","lastTransitionTime":"2026-01-02T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.471567 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.471604 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.471616 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.471636 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.471649 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:17Z","lastTransitionTime":"2026-01-02T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.574580 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.574611 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.574619 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.574631 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.574640 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:17Z","lastTransitionTime":"2026-01-02T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.676927 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.676969 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.676981 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.676997 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.677009 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:17Z","lastTransitionTime":"2026-01-02T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.779528 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.779574 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.779587 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.779603 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.779618 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:17Z","lastTransitionTime":"2026-01-02T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.881912 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.881955 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.881963 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.881978 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.881988 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:17Z","lastTransitionTime":"2026-01-02T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.985370 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.985417 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.985434 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.985458 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:17 crc kubenswrapper[4766]: I0102 00:08:17.985475 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:17Z","lastTransitionTime":"2026-01-02T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.088401 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.088464 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.088480 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.088504 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.088518 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:18Z","lastTransitionTime":"2026-01-02T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.190921 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.190974 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.190982 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.190999 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.191009 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:18Z","lastTransitionTime":"2026-01-02T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.294267 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.294315 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.294326 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.294341 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.294349 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:18Z","lastTransitionTime":"2026-01-02T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.397699 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.397822 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.397897 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.397924 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.397940 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:18Z","lastTransitionTime":"2026-01-02T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.500335 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.500403 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.500421 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.500444 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.500464 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:18Z","lastTransitionTime":"2026-01-02T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.533220 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.533277 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.533314 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.533322 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:18 crc kubenswrapper[4766]: E0102 00:08:18.533430 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:18 crc kubenswrapper[4766]: E0102 00:08:18.533709 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:18 crc kubenswrapper[4766]: E0102 00:08:18.533815 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:18 crc kubenswrapper[4766]: E0102 00:08:18.534037 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.547828 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.603715 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.603757 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.603769 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.603785 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.603799 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:18Z","lastTransitionTime":"2026-01-02T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.706282 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.706353 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.706373 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.706398 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.706416 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:18Z","lastTransitionTime":"2026-01-02T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.809520 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.809581 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.809597 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.809615 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.809626 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:18Z","lastTransitionTime":"2026-01-02T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.913390 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.913429 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.913441 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.913459 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:18 crc kubenswrapper[4766]: I0102 00:08:18.913473 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:18Z","lastTransitionTime":"2026-01-02T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.015832 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.015923 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.015940 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.015960 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.015976 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:19Z","lastTransitionTime":"2026-01-02T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.117491 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.117547 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.117564 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.117585 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.117604 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:19Z","lastTransitionTime":"2026-01-02T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.221128 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.221158 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.221169 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.221184 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.221197 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:19Z","lastTransitionTime":"2026-01-02T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.323670 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.323745 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.323769 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.323796 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.323826 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:19Z","lastTransitionTime":"2026-01-02T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.425768 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.426413 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.426427 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.426445 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.426458 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:19Z","lastTransitionTime":"2026-01-02T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.528914 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.528972 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.528988 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.529009 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.529023 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:19Z","lastTransitionTime":"2026-01-02T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.632034 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.632092 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.632109 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.632135 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.632147 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:19Z","lastTransitionTime":"2026-01-02T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.735676 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.735732 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.735752 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.735778 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.735798 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:19Z","lastTransitionTime":"2026-01-02T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.843573 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.843884 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.843986 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.844086 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.844214 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:19Z","lastTransitionTime":"2026-01-02T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.937887 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.937964 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.937981 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.938006 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.938023 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:19Z","lastTransitionTime":"2026-01-02T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: E0102 00:08:19.959350 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:19Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.969406 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.969579 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.969603 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.969680 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:19 crc kubenswrapper[4766]: I0102 00:08:19.969772 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:19Z","lastTransitionTime":"2026-01-02T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:19 crc kubenswrapper[4766]: E0102 00:08:19.995228 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:19Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.002050 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.002136 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.002159 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.002193 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.002215 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:20 crc kubenswrapper[4766]: E0102 00:08:20.026279 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:20Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.037226 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.037261 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.037273 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.037289 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.037301 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:20 crc kubenswrapper[4766]: E0102 00:08:20.052449 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:20Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.057463 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.057665 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.057796 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.057948 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.058039 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:20 crc kubenswrapper[4766]: E0102 00:08:20.077253 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:20Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:20 crc kubenswrapper[4766]: E0102 00:08:20.077411 4766 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.079141 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.079242 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.079311 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.079390 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.079466 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.182460 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.182512 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.182528 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.182547 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.182562 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.285596 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.285669 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.285692 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.285761 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.285786 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.388834 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.388939 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.388956 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.388981 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.389001 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.491446 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.491927 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.492178 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.492337 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.492480 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.533585 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.533585 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.533601 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:20 crc kubenswrapper[4766]: E0102 00:08:20.534406 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:20 crc kubenswrapper[4766]: E0102 00:08:20.534189 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.533789 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:20 crc kubenswrapper[4766]: E0102 00:08:20.534523 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:20 crc kubenswrapper[4766]: E0102 00:08:20.534786 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.596424 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.596947 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.597152 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.597317 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.597519 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.700291 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.700363 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.700384 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.700414 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.700435 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.805100 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.805184 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.805214 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.805243 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.805262 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.908353 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.908799 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.908979 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.909111 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:20 crc kubenswrapper[4766]: I0102 00:08:20.909230 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:20Z","lastTransitionTime":"2026-01-02T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.012400 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.012479 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.012503 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.012534 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.012556 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:21Z","lastTransitionTime":"2026-01-02T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.115786 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.116209 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.116355 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.116524 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.116652 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:21Z","lastTransitionTime":"2026-01-02T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.220061 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.220137 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.220158 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.220182 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.220199 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:21Z","lastTransitionTime":"2026-01-02T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.323665 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.323726 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.323743 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.323768 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.323786 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:21Z","lastTransitionTime":"2026-01-02T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.426392 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.426447 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.426464 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.426487 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.426507 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:21Z","lastTransitionTime":"2026-01-02T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.529281 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.529370 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.529393 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.529425 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.529446 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:21Z","lastTransitionTime":"2026-01-02T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.632443 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.632528 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.632552 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.632584 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.632607 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:21Z","lastTransitionTime":"2026-01-02T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.735629 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.735683 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.735700 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.735722 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.735739 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:21Z","lastTransitionTime":"2026-01-02T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.840462 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.840538 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.840558 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.840585 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.840614 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:21Z","lastTransitionTime":"2026-01-02T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.943186 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.943491 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.943658 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.943816 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:21 crc kubenswrapper[4766]: I0102 00:08:21.943992 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:21Z","lastTransitionTime":"2026-01-02T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.047068 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.047411 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.047588 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.047730 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.047916 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:22Z","lastTransitionTime":"2026-01-02T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.150648 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.150723 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.150745 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.150770 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.150787 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:22Z","lastTransitionTime":"2026-01-02T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.254391 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.254485 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.254517 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.254563 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.254595 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:22Z","lastTransitionTime":"2026-01-02T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.357995 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.358075 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.358100 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.358131 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.358150 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:22Z","lastTransitionTime":"2026-01-02T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.461441 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.461527 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.461575 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.461601 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.461618 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:22Z","lastTransitionTime":"2026-01-02T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.533618 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.533700 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:22 crc kubenswrapper[4766]: E0102 00:08:22.533941 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:22 crc kubenswrapper[4766]: E0102 00:08:22.534271 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.534492 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:22 crc kubenswrapper[4766]: E0102 00:08:22.534612 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.534731 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:22 crc kubenswrapper[4766]: E0102 00:08:22.535023 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.566582 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.566666 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.566695 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.566727 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.566746 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:22Z","lastTransitionTime":"2026-01-02T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.670285 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.670351 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.670369 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.670398 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.670417 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:22Z","lastTransitionTime":"2026-01-02T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.773271 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.773325 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.773341 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.773364 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.773380 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:22Z","lastTransitionTime":"2026-01-02T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.876541 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.876592 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.876605 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.876623 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.876634 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:22Z","lastTransitionTime":"2026-01-02T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.980243 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.980307 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.980326 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.980353 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:22 crc kubenswrapper[4766]: I0102 00:08:22.980372 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:22Z","lastTransitionTime":"2026-01-02T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.083416 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.083479 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.083499 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.083525 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.083545 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:23Z","lastTransitionTime":"2026-01-02T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.187071 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.187136 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.187154 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.187181 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.187202 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:23Z","lastTransitionTime":"2026-01-02T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.289950 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.290021 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.290044 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.290073 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.290106 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:23Z","lastTransitionTime":"2026-01-02T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.392742 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.393210 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.393383 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.393578 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.393789 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:23Z","lastTransitionTime":"2026-01-02T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.496556 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.496645 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.496668 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.496699 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.496722 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:23Z","lastTransitionTime":"2026-01-02T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.600248 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.600302 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.600318 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.600345 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.600364 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:23Z","lastTransitionTime":"2026-01-02T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.702767 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.702987 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.703046 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.703105 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.703194 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:23Z","lastTransitionTime":"2026-01-02T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.806096 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.806384 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.806506 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.806638 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.806803 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:23Z","lastTransitionTime":"2026-01-02T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.909061 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.909120 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.909137 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.909160 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:23 crc kubenswrapper[4766]: I0102 00:08:23.909176 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:23Z","lastTransitionTime":"2026-01-02T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.011876 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.011947 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.011971 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.011998 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.012019 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:24Z","lastTransitionTime":"2026-01-02T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.114828 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.114943 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.114971 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.114996 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.115013 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:24Z","lastTransitionTime":"2026-01-02T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.217753 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.217790 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.217800 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.217815 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.217829 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:24Z","lastTransitionTime":"2026-01-02T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.320543 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.320612 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.320629 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.320652 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.320669 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:24Z","lastTransitionTime":"2026-01-02T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.423492 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.423563 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.423581 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.423605 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.423622 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:24Z","lastTransitionTime":"2026-01-02T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.527132 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.527194 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.527211 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.527234 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.527251 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:24Z","lastTransitionTime":"2026-01-02T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.533664 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.533733 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.533694 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.533945 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:24 crc kubenswrapper[4766]: E0102 00:08:24.534014 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:24 crc kubenswrapper[4766]: E0102 00:08:24.534149 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:24 crc kubenswrapper[4766]: E0102 00:08:24.534260 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:24 crc kubenswrapper[4766]: E0102 00:08:24.534387 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.630675 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.630821 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.630881 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.630963 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.630990 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:24Z","lastTransitionTime":"2026-01-02T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.733765 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.733825 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.733870 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.733895 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.733913 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:24Z","lastTransitionTime":"2026-01-02T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.837332 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.837397 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.837415 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.837438 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.837457 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:24Z","lastTransitionTime":"2026-01-02T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.939919 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.939979 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.939996 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.940019 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:24 crc kubenswrapper[4766]: I0102 00:08:24.940039 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:24Z","lastTransitionTime":"2026-01-02T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.043402 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.043470 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.043493 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.043524 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.043541 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:25Z","lastTransitionTime":"2026-01-02T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.154821 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.154929 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.154952 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.154983 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.155004 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:25Z","lastTransitionTime":"2026-01-02T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.258644 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.258708 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.258728 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.258751 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.258769 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:25Z","lastTransitionTime":"2026-01-02T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.361760 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.361824 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.361867 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.361897 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.361915 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:25Z","lastTransitionTime":"2026-01-02T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.465510 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.465624 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.465647 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.465676 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.465696 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:25Z","lastTransitionTime":"2026-01-02T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.552227 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.566695 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.568612 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:25 
crc kubenswrapper[4766]: I0102 00:08:25.568643 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.568652 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.568666 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.568675 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:25Z","lastTransitionTime":"2026-01-02T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.580139 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-02T00:07:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c\\\\n2026-01-02T00:07:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c to /host/opt/cni/bin/\\\\n2026-01-02T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-02T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-02T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.599638 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 
00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.616717 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.631422 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a81d2fa-e00b-432c-82da-344be2e8af6b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ce4b7f2bbace6669b4af1d2b6a2aa0dfc734cc543639a61fd362427d2b364ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.643942 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.660664 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.670824 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.670871 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.670883 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.670897 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.670908 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:25Z","lastTransitionTime":"2026-01-02T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.676327 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.699647 4766 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:56Z\\\",\\\"message\\\":\\\"vices_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nI0102 00:07:56.529116 6383 lb_config.go:1031] Cluster endpoints for openshift-machine-config-operator/machine-config-operator for network=default are: map[]\\\\nI0102 00:07:56.529087 6383 services_controller.go:434] Service openshift-marketplace/certified-operators retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{certified-operators openshift-marketplace db855201-4874-4bfa-8608-def310eb5a2a 5471 0 2025-02-23 05:23:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:7FOCZ3GVMQ1pwQKJahWmE09uJDRx6ab8xxcEYE] map[] [{operators.coreos.com/v1alpha1 CatalogSource certified-operators faf75094-01cc-4ebf-8761-1361fa2bf31e 0xc00779338d 0xc00779338e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: certified-operators,olm.managed: true,},ClusterIP:10.217.5.214,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.214],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.715686 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.729452 4766 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.741340 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.755325 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.773161 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.773713 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.773733 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.773741 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.773753 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.773763 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:25Z","lastTransitionTime":"2026-01-02T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.791207 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.804163 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.817280 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:25Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.876645 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.876698 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.876715 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.876736 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.876750 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:25Z","lastTransitionTime":"2026-01-02T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.979018 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.979061 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.979069 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.979083 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:25 crc kubenswrapper[4766]: I0102 00:08:25.979093 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:25Z","lastTransitionTime":"2026-01-02T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.081225 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.081280 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.081299 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.081324 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.081341 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:26Z","lastTransitionTime":"2026-01-02T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.184244 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.184316 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.184337 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.184363 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.184385 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:26Z","lastTransitionTime":"2026-01-02T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.287709 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.287765 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.287776 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.287794 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.287808 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:26Z","lastTransitionTime":"2026-01-02T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.391409 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.391457 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.391467 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.391489 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.391499 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:26Z","lastTransitionTime":"2026-01-02T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.495310 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.495369 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.495386 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.495411 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.495430 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:26Z","lastTransitionTime":"2026-01-02T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.533364 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.533456 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.533392 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:26 crc kubenswrapper[4766]: E0102 00:08:26.533580 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:26 crc kubenswrapper[4766]: E0102 00:08:26.533704 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.534261 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:26 crc kubenswrapper[4766]: E0102 00:08:26.534361 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:26 crc kubenswrapper[4766]: E0102 00:08:26.534572 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.558209 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.598778 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.598826 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.598838 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.598871 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.598884 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:26Z","lastTransitionTime":"2026-01-02T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.701777 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.701819 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.701830 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.701868 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.701881 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:26Z","lastTransitionTime":"2026-01-02T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.804256 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.804309 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.804325 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.804349 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.804365 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:26Z","lastTransitionTime":"2026-01-02T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.907863 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.907911 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.907933 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.907952 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:26 crc kubenswrapper[4766]: I0102 00:08:26.907967 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:26Z","lastTransitionTime":"2026-01-02T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.010288 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.010345 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.010361 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.010383 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.010399 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:27Z","lastTransitionTime":"2026-01-02T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.113158 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.113207 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.113224 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.113250 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.113268 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:27Z","lastTransitionTime":"2026-01-02T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.216500 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.216541 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.216549 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.216564 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.216573 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:27Z","lastTransitionTime":"2026-01-02T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.318352 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.318393 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.318409 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.318424 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.318437 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:27Z","lastTransitionTime":"2026-01-02T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.421760 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.421822 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.421884 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.421916 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.421938 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:27Z","lastTransitionTime":"2026-01-02T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.525186 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.525263 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.525281 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.525307 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.525324 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:27Z","lastTransitionTime":"2026-01-02T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.534691 4766 scope.go:117] "RemoveContainer" containerID="eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.627511 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.627556 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.627564 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.627576 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.627584 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:27Z","lastTransitionTime":"2026-01-02T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.730959 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.731045 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.731064 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.731119 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.731138 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:27Z","lastTransitionTime":"2026-01-02T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.835054 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.835154 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.835183 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.835208 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.835230 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:27Z","lastTransitionTime":"2026-01-02T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.938951 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.939053 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.939072 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.939107 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:27 crc kubenswrapper[4766]: I0102 00:08:27.939142 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:27Z","lastTransitionTime":"2026-01-02T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.042600 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.042669 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.042693 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.042725 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.042753 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:28Z","lastTransitionTime":"2026-01-02T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.132514 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/2.log" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.136470 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58"} Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.136975 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.144778 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.144827 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.144876 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.144907 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.144925 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:28Z","lastTransitionTime":"2026-01-02T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.151746 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.174584 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d465f6da-b83d-4306-878a-67ecd3590998\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab50448d0cde9a3982d3106c391537332529754231f74e10b5599f122c56319c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ca99de9b6ec4a85e4ee32254fdf93f846faf036748297d327bd4e0007954215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3345a83fca294debd1ed87181d996d662bc3facf4d4484c64116590d05ac4025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://733b6a898d0c6d48820f54912587d034469a710
3af84972716331b1f5d87e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://06e451949779c2c0c34eb41926158609bc00dfce68bfa4d28ca5fd066ca1e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3c8b26904adf1747b5012a834b9cd9a4c0a7ca3f0fc597593b594b9aebaee2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3c8b26904adf1747b5012a834b9cd9a4c0a7ca3f0fc597593b594b9aebaee2a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f6ea9695cea2feb2db96f73d1155972bbeb524effb6b23e5a66fd7d208b91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f6ea9695cea2feb2db96f73d1155972bbeb524effb6b23e5a66fd7d208b91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f5a53c095904f850f77e108ab8b4f90a468d69e57509c619b83936a3c3fc2df0\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f5a53c095904f850f77e108ab8b4f90a468d69e57509c619b83936a3c3fc2df0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.185681 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.198320 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.217024 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5bf838ca0799a30c0a828e12202e2c66af34736
86ae514b8bbd94c369d9ac58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:56Z\\\",\\\"message\\\":\\\"vices_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nI0102 00:07:56.529116 6383 lb_config.go:1031] Cluster endpoints for openshift-machine-config-operator/machine-config-operator for network=default are: map[]\\\\nI0102 00:07:56.529087 6383 services_controller.go:434] Service openshift-marketplace/certified-operators retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{certified-operators openshift-marketplace db855201-4874-4bfa-8608-def310eb5a2a 5471 0 2025-02-23 05:23:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:7FOCZ3GVMQ1pwQKJahWmE09uJDRx6ab8xxcEYE] map[] [{operators.coreos.com/v1alpha1 CatalogSource certified-operators faf75094-01cc-4ebf-8761-1361fa2bf31e 0xc00779338d 0xc00779338e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: certified-operators,olm.managed: true,},ClusterIP:10.217.5.214,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.214],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:08:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 
2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.229810 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.245981 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.247298 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.247335 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.247346 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.247361 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.247374 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:28Z","lastTransitionTime":"2026-01-02T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.259868 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.275351 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.292454 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.306506 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.321039 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.334406 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a81d2fa-e00b-432c-82da-344be2e8af6b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ce4b7f2bbace6669b4af1d2b6a2aa0dfc734cc543639a61fd362427d2b364ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.346386 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.350211 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.350240 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.350249 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.350262 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.350270 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:28Z","lastTransitionTime":"2026-01-02T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.359011 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.371101 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.387938 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-02T00:07:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c\\\\n2026-01-02T00:07:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c to /host/opt/cni/bin/\\\\n2026-01-02T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-02T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-02T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.409184 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 
00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.420080 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:28Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.452157 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.452185 4766 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.452193 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.452206 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.452214 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:28Z","lastTransitionTime":"2026-01-02T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.469302 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.469408 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.469469 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.469505 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.469544 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.469695 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.469720 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 
00:08:28.469733 4766 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.469786 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-02 00:09:32.469769517 +0000 UTC m=+147.268256671 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.469878 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:32.469837699 +0000 UTC m=+147.268324853 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.469924 4766 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.469953 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:09:32.469944232 +0000 UTC m=+147.268431386 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.470066 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.470110 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.470121 4766 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.470172 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-02 00:09:32.470155918 +0000 UTC m=+147.268643072 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.470201 4766 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.470285 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:09:32.470259642 +0000 UTC m=+147.268746836 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.533896 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.533928 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.533961 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.534035 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.534050 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.534187 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.534250 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:28 crc kubenswrapper[4766]: E0102 00:08:28.534316 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.554368 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.554395 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.554403 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.554416 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.554424 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:28Z","lastTransitionTime":"2026-01-02T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.656726 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.656785 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.656805 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.656831 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.656878 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:28Z","lastTransitionTime":"2026-01-02T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.759483 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.759549 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.759566 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.759590 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.759607 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:28Z","lastTransitionTime":"2026-01-02T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.862425 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.862487 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.862507 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.862531 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.862548 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:28Z","lastTransitionTime":"2026-01-02T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.965232 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.965305 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.965322 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.965345 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:28 crc kubenswrapper[4766]: I0102 00:08:28.965365 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:28Z","lastTransitionTime":"2026-01-02T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.068250 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.068302 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.068320 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.068345 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.068361 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:29Z","lastTransitionTime":"2026-01-02T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.143564 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/3.log" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.144510 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/2.log" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.150800 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58" exitCode=1 Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.150865 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58"} Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.150913 4766 scope.go:117] "RemoveContainer" containerID="eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.151691 4766 scope.go:117] "RemoveContainer" containerID="e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58" Jan 02 00:08:29 crc kubenswrapper[4766]: E0102 00:08:29.151834 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.170862 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.170893 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.170905 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.170925 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.170937 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:29Z","lastTransitionTime":"2026-01-02T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.173773 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.191313 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.208315 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.227799 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.239381 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.257672 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.269120 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.273590 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.273641 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.273658 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.273682 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.273698 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:29Z","lastTransitionTime":"2026-01-02T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.285525 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-02T00:07:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c\\\\n2026-01-02T00:07:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c to /host/opt/cni/bin/\\\\n2026-01-02T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-02T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-02T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.300645 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 
00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.312468 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.324459 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a81d2fa-e00b-432c-82da-344be2e8af6b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ce4b7f2bbace6669b4af1d2b6a2aa0dfc734cc543639a61fd362427d2b364ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.336687 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.351805 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.362962 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.376005 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:29 
crc kubenswrapper[4766]: I0102 00:08:29.376072 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.376096 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.376128 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.376149 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:29Z","lastTransitionTime":"2026-01-02T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.380557 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5bf838ca0799a30c0a828e12202e2c66af34736
86ae514b8bbd94c369d9ac58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb07fb30872442073c49fc38662c7f4411c1e954f4e05ee2e9be5e1ab69d78ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:07:56Z\\\",\\\"message\\\":\\\"vices_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nI0102 00:07:56.529116 6383 lb_config.go:1031] Cluster endpoints for openshift-machine-config-operator/machine-config-operator for network=default are: map[]\\\\nI0102 00:07:56.529087 6383 services_controller.go:434] Service openshift-marketplace/certified-operators retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{certified-operators openshift-marketplace db855201-4874-4bfa-8608-def310eb5a2a 5471 0 2025-02-23 05:23:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:7FOCZ3GVMQ1pwQKJahWmE09uJDRx6ab8xxcEYE] map[] [{operators.coreos.com/v1alpha1 CatalogSource certified-operators faf75094-01cc-4ebf-8761-1361fa2bf31e 0xc00779338d 0xc00779338e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: certified-operators,olm.managed: true,},ClusterIP:10.217.5.214,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.214],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:08:28Z\\\",\\\"message\\\":\\\"ces.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0102 00:08:28.835966 6840 services_controller.go:444] Built service openshift-etcd-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.836057 6840 services_controller.go:444] Built service openshift-service-ca-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.836065 6840 services_controller.go:445] Built service openshift-etcd-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.836073 6840 services_controller.go:445] Built service openshift-service-ca-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.835535 6840 services_controller.go:443] Built service openshift-machine-api/machine-api-operator-webhook LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.254\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, 
nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0102 00:08:28.836096 6840 services_controller.go:451] Built service openshift-service-ca-oper\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:08:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\
"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.392143 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.411128 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d465f6da-b83d-4306-878a-67ecd3590998\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab50448d0cde9a3982d3106c391537332529754231f74e10b5599f122c56319c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ca99de9b6ec4a85e4ee32254fdf93f846faf036748297d327bd4e0007954215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3345a83fca294debd1ed87181d996d662bc3facf4d4484c64116590d05ac4025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://733b6a898d0c6d48820f54912587d034469a710
3af84972716331b1f5d87e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://06e451949779c2c0c34eb41926158609bc00dfce68bfa4d28ca5fd066ca1e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3c8b26904adf1747b5012a834b9cd9a4c0a7ca3f0fc597593b594b9aebaee2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3c8b26904adf1747b5012a834b9cd9a4c0a7ca3f0fc597593b594b9aebaee2a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f6ea9695cea2feb2db96f73d1155972bbeb524effb6b23e5a66fd7d208b91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f6ea9695cea2feb2db96f73d1155972bbeb524effb6b23e5a66fd7d208b91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f5a53c095904f850f77e108ab8b4f90a468d69e57509c619b83936a3c3fc2df0\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f5a53c095904f850f77e108ab8b4f90a468d69e57509c619b83936a3c3fc2df0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.425929 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.440274 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:29Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.478723 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.478775 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.478792 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.478813 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.478830 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:29Z","lastTransitionTime":"2026-01-02T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.581366 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.581462 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.581479 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.581503 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.581522 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:29Z","lastTransitionTime":"2026-01-02T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.684054 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.684109 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.684126 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.684149 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.684168 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:29Z","lastTransitionTime":"2026-01-02T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.787725 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.787800 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.787825 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.787929 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.787959 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:29Z","lastTransitionTime":"2026-01-02T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.890801 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.890861 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.890875 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.890891 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.890902 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:29Z","lastTransitionTime":"2026-01-02T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.994112 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.994173 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.994196 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.994225 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:29 crc kubenswrapper[4766]: I0102 00:08:29.994249 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:29Z","lastTransitionTime":"2026-01-02T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.097596 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.097658 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.097682 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.097711 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.097732 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.156662 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/3.log" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.163662 4766 scope.go:117] "RemoveContainer" containerID="e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58" Jan 02 00:08:30 crc kubenswrapper[4766]: E0102 00:08:30.164072 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.187206 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kuberne
tes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.201621 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.201656 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.201699 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.201732 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.201746 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.206829 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.221990 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.242149 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-02T00:07:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c\\\\n2026-01-02T00:07:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c to /host/opt/cni/bin/\\\\n2026-01-02T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-02T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-02T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.257099 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 
00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.272496 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.286640 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a81d2fa-e00b-432c-82da-344be2e8af6b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ce4b7f2bbace6669b4af1d2b6a2aa0dfc734cc543639a61fd362427d2b364ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.304801 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.304829 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.304865 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.304883 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.304894 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.308822 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d465f6da-b83d-4306-878a-67ecd3590998\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab50448d0cde9a3982d3106c391537332529754231f74e10b5599f122c56319c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ca99de9b6ec4a85e4ee32254fdf93f846faf036748297d327bd4e0007954215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3345a83fca294debd1ed87181d996d662bc3facf4d4484c64116590d05ac4025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://733b6a898d0c6d48820f54912587d034469a7103af84972716331b1f5d87e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://06e451949779c2c0c34eb41926158609bc00dfce68bfa4d28ca5fd066ca1e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3c8b26904adf1747b5012a834b9cd9a4c0a7ca3f0fc597593b594b9aebaee2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3c8b26904adf1747b5012a834b9cd9a4c0a7ca3f0fc597593b594b9aebaee2a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f6ea9695cea2feb2db96f73d1155972bbeb524effb6b23e5a66fd7d208b91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f6ea9695cea2feb2db96f73d1155972bbeb524effb6b23e5a66fd7d208b91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f5a53c095904f850f77e108ab8b4f90a468d69e57509c619b83936a3c3fc2df0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f5a53c095904f850f77e108ab8b4f90a468d69e57509c619b83936a3c3fc2df0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.326211 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.347381 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.372729 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5bf838ca0799a30c0a828e12202e2c66af34736
86ae514b8bbd94c369d9ac58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:08:28Z\\\",\\\"message\\\":\\\"ces.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0102 00:08:28.835966 6840 services_controller.go:444] Built service openshift-etcd-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.836057 6840 services_controller.go:444] Built service openshift-service-ca-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.836065 6840 services_controller.go:445] Built service openshift-etcd-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.836073 6840 services_controller.go:445] Built service openshift-service-ca-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.835535 6840 services_controller.go:443] Built service openshift-machine-api/machine-api-operator-webhook LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.254\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0102 00:08:28.836096 6840 services_controller.go:451] Built service openshift-service-ca-oper\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:08:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.387592 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.403544 4766 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.407808 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.408166 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.408279 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.408380 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.408474 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.424394 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.440235 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.454923 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.464973 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.465203 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.465401 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.465532 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.465697 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.472951 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: E0102 00:08:30.480486 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.485247 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.485936 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.486000 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.486021 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.486051 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.486075 4766 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.497932 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: E0102 00:08:30.499979 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.505859 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.505895 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.505928 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.505942 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.505953 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: E0102 00:08:30.518679 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.522564 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.522891 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.522960 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.523026 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.523106 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.533181 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.533288 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:30 crc kubenswrapper[4766]: E0102 00:08:30.533460 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.533495 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:30 crc kubenswrapper[4766]: E0102 00:08:30.533592 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.533636 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:30 crc kubenswrapper[4766]: E0102 00:08:30.533658 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:30 crc kubenswrapper[4766]: E0102 00:08:30.533760 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:30 crc kubenswrapper[4766]: E0102 00:08:30.537684 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.540935 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.540974 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.540986 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.541005 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.541018 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: E0102 00:08:30.556367 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:30Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:30 crc kubenswrapper[4766]: E0102 00:08:30.556572 4766 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.562695 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.562796 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.562818 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.562867 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.562900 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.666303 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.666336 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.666345 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.666358 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.666367 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.769706 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.769751 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.769768 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.769789 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.769806 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.872569 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.872643 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.872667 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.872694 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.872716 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.975498 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.975543 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.975554 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.975570 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:30 crc kubenswrapper[4766]: I0102 00:08:30.975581 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:30Z","lastTransitionTime":"2026-01-02T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.078199 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.078233 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.078244 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.078295 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.078310 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:31Z","lastTransitionTime":"2026-01-02T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.180633 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.181710 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.181885 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.182029 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.182233 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:31Z","lastTransitionTime":"2026-01-02T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.285985 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.286019 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.286031 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.286047 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.286059 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:31Z","lastTransitionTime":"2026-01-02T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.388683 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.388745 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.388767 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.388796 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.388818 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:31Z","lastTransitionTime":"2026-01-02T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.492180 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.492231 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.492248 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.492269 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.492286 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:31Z","lastTransitionTime":"2026-01-02T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.594565 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.594603 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.594612 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.594626 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.594639 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:31Z","lastTransitionTime":"2026-01-02T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.698225 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.698570 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.698741 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.698976 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.699199 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:31Z","lastTransitionTime":"2026-01-02T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.801954 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.802000 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.802013 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.802029 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.802040 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:31Z","lastTransitionTime":"2026-01-02T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.904907 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.905320 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.905474 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.905630 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:31 crc kubenswrapper[4766]: I0102 00:08:31.905773 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:31Z","lastTransitionTime":"2026-01-02T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.009752 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.009821 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.009896 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.009933 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.009958 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:32Z","lastTransitionTime":"2026-01-02T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.112776 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.112822 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.112834 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.112876 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.112890 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:32Z","lastTransitionTime":"2026-01-02T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.215134 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.215205 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.215222 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.215239 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.215251 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:32Z","lastTransitionTime":"2026-01-02T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.317368 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.317643 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.317755 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.317871 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.317977 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:32Z","lastTransitionTime":"2026-01-02T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.421038 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.421455 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.421626 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.421903 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.422113 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:32Z","lastTransitionTime":"2026-01-02T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.525364 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.525432 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.525449 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.525473 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.525492 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:32Z","lastTransitionTime":"2026-01-02T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.533876 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.533931 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.533976 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.533932 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:32 crc kubenswrapper[4766]: E0102 00:08:32.534096 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:32 crc kubenswrapper[4766]: E0102 00:08:32.534451 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:32 crc kubenswrapper[4766]: E0102 00:08:32.534675 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:32 crc kubenswrapper[4766]: E0102 00:08:32.534780 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.628397 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.628456 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.628476 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.628504 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.628522 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:32Z","lastTransitionTime":"2026-01-02T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.730669 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.730787 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.730813 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.730894 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.730925 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:32Z","lastTransitionTime":"2026-01-02T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.834455 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.834533 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.834558 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.834586 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.834608 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:32Z","lastTransitionTime":"2026-01-02T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.937173 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.937231 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.937248 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.937270 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:32 crc kubenswrapper[4766]: I0102 00:08:32.937287 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:32Z","lastTransitionTime":"2026-01-02T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.040289 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.040376 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.040395 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.040424 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.040443 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:33Z","lastTransitionTime":"2026-01-02T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.143299 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.143365 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.143389 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.143419 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.143441 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:33Z","lastTransitionTime":"2026-01-02T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.246644 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.246726 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.246753 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.246785 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.246811 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:33Z","lastTransitionTime":"2026-01-02T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.350385 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.350459 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.350482 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.350514 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.350536 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:33Z","lastTransitionTime":"2026-01-02T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.453206 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.453254 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.453267 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.453284 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.453297 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:33Z","lastTransitionTime":"2026-01-02T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.559089 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.559171 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.559195 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.559229 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.559264 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:33Z","lastTransitionTime":"2026-01-02T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.662941 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.663034 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.663060 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.663093 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.663117 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:33Z","lastTransitionTime":"2026-01-02T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.765667 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.765707 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.765716 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.765730 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.765738 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:33Z","lastTransitionTime":"2026-01-02T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.868989 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.869412 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.870309 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.870681 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.871208 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:33Z","lastTransitionTime":"2026-01-02T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.974942 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.975278 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.975371 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.975465 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:33 crc kubenswrapper[4766]: I0102 00:08:33.975545 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:33Z","lastTransitionTime":"2026-01-02T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.078111 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.078173 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.078191 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.078215 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.078232 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:34Z","lastTransitionTime":"2026-01-02T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.181132 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.181197 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.181214 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.181237 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.181254 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:34Z","lastTransitionTime":"2026-01-02T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.284505 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.284578 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.284597 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.284624 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.284647 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:34Z","lastTransitionTime":"2026-01-02T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.387951 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.388014 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.388032 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.388056 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.388073 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:34Z","lastTransitionTime":"2026-01-02T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.490588 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.490631 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.490645 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.490664 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.490675 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:34Z","lastTransitionTime":"2026-01-02T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.533748 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:34 crc kubenswrapper[4766]: E0102 00:08:34.533921 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.534060 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:34 crc kubenswrapper[4766]: E0102 00:08:34.534214 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.534324 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.534365 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:34 crc kubenswrapper[4766]: E0102 00:08:34.534671 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:34 crc kubenswrapper[4766]: E0102 00:08:34.534820 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.593089 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.593402 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.593609 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.593807 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.594053 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:34Z","lastTransitionTime":"2026-01-02T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.696881 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.697239 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.697459 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.697671 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.697826 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:34Z","lastTransitionTime":"2026-01-02T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.801285 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.801335 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.801352 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.801376 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.801394 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:34Z","lastTransitionTime":"2026-01-02T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.903963 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.904031 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.904045 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.904062 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:34 crc kubenswrapper[4766]: I0102 00:08:34.904098 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:34Z","lastTransitionTime":"2026-01-02T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.007025 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.007082 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.007098 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.007124 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.007141 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:35Z","lastTransitionTime":"2026-01-02T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.110129 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.110439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.110711 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.110931 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.111132 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:35Z","lastTransitionTime":"2026-01-02T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.214057 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.214485 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.214762 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.215027 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.215175 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:35Z","lastTransitionTime":"2026-01-02T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.318016 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.318056 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.318068 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.318085 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.318097 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:35Z","lastTransitionTime":"2026-01-02T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.421498 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.421563 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.421581 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.421606 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.421627 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:35Z","lastTransitionTime":"2026-01-02T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.524608 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.524686 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.524710 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.524740 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.524762 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:35Z","lastTransitionTime":"2026-01-02T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.552819 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.575561 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s78r2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e2dfc33-3d7d-4542-82d0-91c64b3cf0ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baf837a620d6a41b227673784c844b5f74bb2e2eb7cfce890f8863e8d4cde511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def2d932129c1d92ef84256c48777f2af992e2c240a267356c6651c84885d2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce26f942b09ceae419bc1305de00acbf60ae39f9bb38689247800549f07318af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f85906ea735fef577a78bbc63a322239c03ebc53ea3200b40c95970b7b56d53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://625007e46e216a8a80c2efedb2524f356273dc556b2d0407702c559e4464835b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e698c4ecc7e6793feb8580359e25e3eab5d7fa7bb14248a3c678f568ceb6b5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9e2dab69675ec16298882f89115ff01b6d152de7db13b008c68611fb1450fc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqnpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s78r2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.595894 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66b7fd44-a7ca-46a0-a585-cb37baccc49e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:08:28Z\\\",\\\"message\\\":\\\"ces.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0102 00:08:28.835966 6840 services_controller.go:444] Built service openshift-etcd-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.836057 6840 services_controller.go:444] Built service openshift-service-ca-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.836065 6840 services_controller.go:445] Built service openshift-etcd-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.836073 6840 services_controller.go:445] Built service openshift-service-ca-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0102 00:08:28.835535 6840 services_controller.go:443] Built service openshift-machine-api/machine-api-operator-webhook LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.254\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0102 00:08:28.836096 6840 services_controller.go:451] Built service openshift-service-ca-oper\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:08:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbsns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.616683 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cad8c8c8-e3a5-4066-821f-eb884896d207\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0102 00:07:17.935151 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0102 00:07:17.937233 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2082234913/tls.crt::/tmp/serving-cert-2082234913/tls.key\\\\\\\"\\\\nI0102 00:07:24.064445 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0102 00:07:24.066662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0102 00:07:24.066681 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0102 00:07:24.066701 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0102 00:07:24.066706 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0102 00:07:24.074100 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0102 00:07:24.074124 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074128 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0102 00:07:24.074132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0102 00:07:24.074135 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0102 00:07:24.074138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0102 00:07:24.074140 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0102 00:07:24.074208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0102 00:07:24.077169 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.627048 4766 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.627091 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.627103 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.627120 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.627136 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:35Z","lastTransitionTime":"2026-01-02T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.646006 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d465f6da-b83d-4306-878a-67ecd3590998\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab50448d0cde9a3982d3106c391537332529754231f74e10b5599f122c56319c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ca99de9b6ec4a85e4ee32254fdf93f846faf036748297d327bd4e0007954215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\
\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3345a83fca294debd1ed87181d996d662bc3facf4d4484c64116590d05ac4025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://733b6a898d0c6d48820f54912587d034469a7103af84972716331b1f5d87e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://06e451949779c2c0c34eb41926158609bc00dfce68bfa4d28ca5fd066ca1e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3c8b26904adf1747b5012a834b9cd9a4c0a7ca3f0fc597593b594b9aebaee2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termina
ted\\\":{\\\"containerID\\\":\\\"cri-o://f3c8b26904adf1747b5012a834b9cd9a4c0a7ca3f0fc597593b594b9aebaee2a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f6ea9695cea2feb2db96f73d1155972bbeb524effb6b23e5a66fd7d208b91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f6ea9695cea2feb2db96f73d1155972bbeb524effb6b23e5a66fd7d208b91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f5a53c095904f850f77e108ab8b4f90a468d69e57509c619b83936a3c3fc2df0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f5a53c095904f850f77e108ab8b4f90a468d69e57509c619b83936a3c3fc2df0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.660048 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0652b8c671f0f68b34a4582e8901b42ea790c408f0418f922439cba661272693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.673368 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a62872f4-c5c6-4f43-ba8a-40e0c96e6968\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e3e9e4d27efd2109f93153f2d3503317780d811f74eee1ba5c5cdf81d3f6d40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvrt6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hlsf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.702279 4766 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-56zlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"795ac7da-eca3-4b11-8a9c-308150aa9164\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8b0581f8376440ea7f8530f4bd824dd456f9c157bab3ce4fbe9d1667bda9344\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwznf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-56zlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.724156 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c21db25e-e0be-4a86-8268-5e180dbaa525\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fea5fd918a0ad79fc79ffa5bae86c8be63dc3ec1ec6dce6487bd07028e65bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://815da9b7fa8b9832441867606a46600d7e3fe9254ff2ac488b2d73b81524d317\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee33636b2532473172243b113198ab8f7cae15c11fc936abad1cf2dcdeecfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.729722 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.729766 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.729779 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.729793 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.729803 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:35Z","lastTransitionTime":"2026-01-02T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.744539 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.759521 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6bf958e22d182d14b409084d12a4ae46b956ef9f0232c8e44fafe089d14f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f92ac0ad9e3714750c2b77cf85e920df744bdba097a7f62b89d14694291cd42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.771996 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mdmsr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ecd9664-bbd8-46b5-81d8-98d0cdb5e8cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a3fe82c43eaa6a8951831e38cf8c4c7515c1465979853c5d0ca635ef7401151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kfs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mdmsr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.783402 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.794238 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2e364626ae658186ea7d3091d39f859d2a2fde6d1a080515cb69a3b4368ac24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.808183 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lmc2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc58583-1116-4066-9ca8-da659b367839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-02T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-02T00:07:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c\\\\n2026-01-02T00:07:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8318a030-49b7-4b5a-8221-3c132dc1f46c to /host/opt/cni/bin/\\\\n2026-01-02T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-02T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-02T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmj8l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lmc2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.819543 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c79dbb1-c0a5-4c74-be7c-65e8823fc29a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e91a55650d8ed56ce51f4480344aca4f420ab8bf7b183f4fbfd319dd749767d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://459fdda1327f2bc537c0255c6da3a9aab83cfa31c50f4381f99fdb7bc36652ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xn7v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-95799\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 
00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.832471 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.832550 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.832568 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.832592 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.832616 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:35Z","lastTransitionTime":"2026-01-02T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.836772 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f869b527-abf2-400f-90c2-ebee595564a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dj9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zd2t8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.848436 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a81d2fa-e00b-432c-82da-344be2e8af6b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ce4b7f2bbace6669b4af1d2b6a2aa0dfc734cc543639a61fd362427d2b364ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95f1335934f87803ed602250c386d6847423e6318047599b974707acb04494e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.860110 4766 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62b90892-e222-44ae-8145-d377c00e8df9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-02T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a27f65aeb508c6780fd6af38a7850a088580d9434e4149e959247cf694b3751\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://203b2285af60ff595b7712de3fbac8ef7ef3f3a1309a904615ea3465b7441d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fc7bf3d6725b377ad3961d854087aa94ab8584c9e9fd376100c030e9a04d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-02T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b8c8dd72de9fafe7631bbd60852a56891946a6fbc1686c3ac71682a55a297a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-02T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-02T00:07:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-02T00:07:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:35Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.934854 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.934894 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.934904 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.934918 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:35 crc kubenswrapper[4766]: I0102 00:08:35.934928 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:35Z","lastTransitionTime":"2026-01-02T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.038142 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.038211 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.038229 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.038253 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.038271 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:36Z","lastTransitionTime":"2026-01-02T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.140794 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.140894 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.140920 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.140946 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.140964 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:36Z","lastTransitionTime":"2026-01-02T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.244084 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.244148 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.244171 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.244199 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.244220 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:36Z","lastTransitionTime":"2026-01-02T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.346957 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.347025 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.347046 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.347071 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.347089 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:36Z","lastTransitionTime":"2026-01-02T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.450340 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.450517 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.450545 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.450572 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.450591 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:36Z","lastTransitionTime":"2026-01-02T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.533333 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.533398 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.533369 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:36 crc kubenswrapper[4766]: E0102 00:08:36.533563 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.533333 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:36 crc kubenswrapper[4766]: E0102 00:08:36.533771 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:36 crc kubenswrapper[4766]: E0102 00:08:36.534186 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:36 crc kubenswrapper[4766]: E0102 00:08:36.534477 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.554719 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.554775 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.554789 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.554808 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.554825 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:36Z","lastTransitionTime":"2026-01-02T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.657209 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.657269 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.657289 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.657318 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.657346 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:36Z","lastTransitionTime":"2026-01-02T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.759280 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.759325 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.759336 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.759353 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.759363 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:36Z","lastTransitionTime":"2026-01-02T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.862134 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.862194 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.862212 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.862235 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.862252 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:36Z","lastTransitionTime":"2026-01-02T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.967602 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.967666 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.967681 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.967701 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:36 crc kubenswrapper[4766]: I0102 00:08:36.967718 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:36Z","lastTransitionTime":"2026-01-02T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.074686 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.074751 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.074772 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.074800 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.074821 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:37Z","lastTransitionTime":"2026-01-02T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.178168 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.178234 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.178252 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.178274 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.178291 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:37Z","lastTransitionTime":"2026-01-02T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.281222 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.281282 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.281307 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.281336 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.281353 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:37Z","lastTransitionTime":"2026-01-02T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.384338 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.384433 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.384459 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.384817 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.384837 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:37Z","lastTransitionTime":"2026-01-02T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.488070 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.488183 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.488207 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.488237 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.488258 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:37Z","lastTransitionTime":"2026-01-02T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.591129 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.591171 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.591183 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.591199 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.591212 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:37Z","lastTransitionTime":"2026-01-02T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.694457 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.694519 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.694540 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.694566 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.694586 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:37Z","lastTransitionTime":"2026-01-02T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.797529 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.797581 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.797591 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.797609 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.797623 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:37Z","lastTransitionTime":"2026-01-02T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.900649 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.900719 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.900737 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.900762 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:37 crc kubenswrapper[4766]: I0102 00:08:37.900780 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:37Z","lastTransitionTime":"2026-01-02T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.003934 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.004038 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.004057 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.004126 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.004147 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:38Z","lastTransitionTime":"2026-01-02T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.107377 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.107448 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.107468 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.107493 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.107515 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:38Z","lastTransitionTime":"2026-01-02T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.210409 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.210468 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.210482 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.210504 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.210519 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:38Z","lastTransitionTime":"2026-01-02T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.312997 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.313066 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.313084 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.313106 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.313122 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:38Z","lastTransitionTime":"2026-01-02T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.417404 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.417468 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.417483 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.417503 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.417521 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:38Z","lastTransitionTime":"2026-01-02T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.521156 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.521234 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.521270 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.521306 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.521324 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:38Z","lastTransitionTime":"2026-01-02T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.533331 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.533371 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.533331 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:38 crc kubenswrapper[4766]: E0102 00:08:38.533446 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.533479 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:38 crc kubenswrapper[4766]: E0102 00:08:38.533547 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:38 crc kubenswrapper[4766]: E0102 00:08:38.533607 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:38 crc kubenswrapper[4766]: E0102 00:08:38.533668 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.624002 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.624083 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.624101 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.624130 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.624155 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:38Z","lastTransitionTime":"2026-01-02T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.727123 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.727174 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.727214 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.727235 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.727254 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:38Z","lastTransitionTime":"2026-01-02T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.830010 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.830069 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.830107 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.830135 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.830157 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:38Z","lastTransitionTime":"2026-01-02T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.933269 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.933316 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.933332 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.933355 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:38 crc kubenswrapper[4766]: I0102 00:08:38.933372 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:38Z","lastTransitionTime":"2026-01-02T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.036966 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.037028 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.037052 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.037079 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.037099 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:39Z","lastTransitionTime":"2026-01-02T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.140327 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.140390 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.140407 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.140431 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.140449 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:39Z","lastTransitionTime":"2026-01-02T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.243518 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.243601 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.243613 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.243635 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.243648 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:39Z","lastTransitionTime":"2026-01-02T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.346396 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.346460 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.346477 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.346501 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.346520 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:39Z","lastTransitionTime":"2026-01-02T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.449825 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.449880 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.449889 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.449908 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.449918 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:39Z","lastTransitionTime":"2026-01-02T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.552591 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.552641 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.552657 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.552677 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.552694 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:39Z","lastTransitionTime":"2026-01-02T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.655325 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.655402 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.655415 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.655433 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.655445 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:39Z","lastTransitionTime":"2026-01-02T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.758375 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.758434 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.758450 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.758473 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.758491 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:39Z","lastTransitionTime":"2026-01-02T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.861826 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.862053 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.862085 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.862115 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.862136 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:39Z","lastTransitionTime":"2026-01-02T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.965001 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.965080 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.965112 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.965145 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:39 crc kubenswrapper[4766]: I0102 00:08:39.965164 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:39Z","lastTransitionTime":"2026-01-02T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.067932 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.068028 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.068055 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.068286 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.068358 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.171427 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.171493 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.171512 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.171539 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.171558 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.274922 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.275003 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.275016 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.275035 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.275047 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.377634 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.377685 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.377697 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.377715 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.377733 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.480677 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.480736 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.480750 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.480768 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.480780 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.533436 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.533493 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.533534 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:40 crc kubenswrapper[4766]: E0102 00:08:40.533601 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.533716 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:40 crc kubenswrapper[4766]: E0102 00:08:40.533921 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:40 crc kubenswrapper[4766]: E0102 00:08:40.533983 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:40 crc kubenswrapper[4766]: E0102 00:08:40.534149 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.583746 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.583803 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.583821 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.583912 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.583933 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.686197 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.686250 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.686268 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.686290 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.686309 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.789053 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.789127 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.789148 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.789174 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.789191 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.893346 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.893400 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.893417 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.893439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.893457 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.937255 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.937424 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.937454 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.937540 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.937615 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: E0102 00:08:40.965164 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.971333 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.971377 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.971407 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.971426 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.971436 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:40 crc kubenswrapper[4766]: E0102 00:08:40.991376 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:40Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.996118 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.996174 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.996198 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.996231 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:40 crc kubenswrapper[4766]: I0102 00:08:40.996257 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:40Z","lastTransitionTime":"2026-01-02T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:41 crc kubenswrapper[4766]: E0102 00:08:41.020996 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.026210 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.026259 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.026270 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.026288 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.026300 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:41Z","lastTransitionTime":"2026-01-02T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:41 crc kubenswrapper[4766]: E0102 00:08:41.046116 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.050216 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.050282 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.050304 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.050329 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.050347 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:41Z","lastTransitionTime":"2026-01-02T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:41 crc kubenswrapper[4766]: E0102 00:08:41.069661 4766 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-02T00:08:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"128f0816-59c2-4555-a5b2-57e34966d0da\\\",\\\"systemUUID\\\":\\\"7aff2e2a-6602-4011-9647-4cd49a97a1f5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-02T00:08:41Z is after 2025-08-24T17:21:41Z" Jan 02 00:08:41 crc kubenswrapper[4766]: E0102 00:08:41.069968 4766 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.072163 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.072223 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.072233 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.072247 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.072259 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:41Z","lastTransitionTime":"2026-01-02T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.174951 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.174992 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.175003 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.175020 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.175031 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:41Z","lastTransitionTime":"2026-01-02T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.278658 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.278727 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.278745 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.278769 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.278787 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:41Z","lastTransitionTime":"2026-01-02T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.381577 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.381663 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.381683 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.381708 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.381727 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:41Z","lastTransitionTime":"2026-01-02T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.485272 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.485357 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.485380 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.485411 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.485433 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:41Z","lastTransitionTime":"2026-01-02T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.588384 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.588472 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.588490 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.588513 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.588529 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:41Z","lastTransitionTime":"2026-01-02T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.691429 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.691500 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.691518 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.691543 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.691560 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:41Z","lastTransitionTime":"2026-01-02T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.794294 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.794378 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.794401 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.794430 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.794455 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:41Z","lastTransitionTime":"2026-01-02T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.897016 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.897111 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.897128 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.897151 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:41 crc kubenswrapper[4766]: I0102 00:08:41.897168 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:41Z","lastTransitionTime":"2026-01-02T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.000162 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.000257 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.000282 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.000348 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.000371 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:42Z","lastTransitionTime":"2026-01-02T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.103632 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.103678 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.103690 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.103707 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.103722 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:42Z","lastTransitionTime":"2026-01-02T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.206496 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.206555 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.206573 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.206596 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.206613 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:42Z","lastTransitionTime":"2026-01-02T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.309662 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.309718 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.309734 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.309759 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.309779 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:42Z","lastTransitionTime":"2026-01-02T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.413168 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.413230 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.413251 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.413277 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.413295 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:42Z","lastTransitionTime":"2026-01-02T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.516485 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.516523 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.516550 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.516563 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.516572 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:42Z","lastTransitionTime":"2026-01-02T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.533420 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.533554 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:42 crc kubenswrapper[4766]: E0102 00:08:42.533741 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.533871 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.533916 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:42 crc kubenswrapper[4766]: E0102 00:08:42.534229 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:42 crc kubenswrapper[4766]: E0102 00:08:42.534982 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:42 crc kubenswrapper[4766]: E0102 00:08:42.535333 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.535379 4766 scope.go:117] "RemoveContainer" containerID="e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58" Jan 02 00:08:42 crc kubenswrapper[4766]: E0102 00:08:42.535614 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.620005 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.620082 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.620100 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.620125 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.620143 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:42Z","lastTransitionTime":"2026-01-02T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.723228 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.723315 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.723339 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.723369 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.723392 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:42Z","lastTransitionTime":"2026-01-02T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.826928 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.826986 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.827003 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.827027 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.827044 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:42Z","lastTransitionTime":"2026-01-02T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.930014 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.930110 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.930134 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.930165 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:42 crc kubenswrapper[4766]: I0102 00:08:42.930189 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:42Z","lastTransitionTime":"2026-01-02T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.033208 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.033289 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.033314 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.033344 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.033370 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:43Z","lastTransitionTime":"2026-01-02T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.135765 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.135827 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.135873 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.135899 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.135916 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:43Z","lastTransitionTime":"2026-01-02T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.238651 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.238716 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.238753 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.238785 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.238806 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:43Z","lastTransitionTime":"2026-01-02T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.341004 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.341100 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.341119 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.341142 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.341160 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:43Z","lastTransitionTime":"2026-01-02T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.444958 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.445046 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.445080 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.445112 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.445133 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:43Z","lastTransitionTime":"2026-01-02T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.548368 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.548442 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.548472 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.548507 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.548527 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:43Z","lastTransitionTime":"2026-01-02T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.655687 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.655747 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.655763 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.655785 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.655802 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:43Z","lastTransitionTime":"2026-01-02T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.760228 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.760335 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.760353 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.760380 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.760403 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:43Z","lastTransitionTime":"2026-01-02T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.869565 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.869601 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.869627 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.869639 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.869648 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:43Z","lastTransitionTime":"2026-01-02T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.972359 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.972411 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.972429 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.972451 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:43 crc kubenswrapper[4766]: I0102 00:08:43.972467 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:43Z","lastTransitionTime":"2026-01-02T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.076071 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.076147 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.076169 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.076197 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.076218 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:44Z","lastTransitionTime":"2026-01-02T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.178897 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.178946 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.178960 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.178978 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.178990 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:44Z","lastTransitionTime":"2026-01-02T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.281877 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.281954 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.281974 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.281998 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.282014 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:44Z","lastTransitionTime":"2026-01-02T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.385070 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.385166 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.385219 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.385250 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.385270 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:44Z","lastTransitionTime":"2026-01-02T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.450465 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:44 crc kubenswrapper[4766]: E0102 00:08:44.450708 4766 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:08:44 crc kubenswrapper[4766]: E0102 00:08:44.450893 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs podName:f869b527-abf2-400f-90c2-ebee595564a2 nodeName:}" failed. No retries permitted until 2026-01-02 00:09:48.450815013 +0000 UTC m=+163.249302197 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs") pod "network-metrics-daemon-zd2t8" (UID: "f869b527-abf2-400f-90c2-ebee595564a2") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.488528 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.488594 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.488611 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.488634 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.488652 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:44Z","lastTransitionTime":"2026-01-02T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.533133 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.533142 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.533147 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.533169 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:44 crc kubenswrapper[4766]: E0102 00:08:44.533334 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:44 crc kubenswrapper[4766]: E0102 00:08:44.533574 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:44 crc kubenswrapper[4766]: E0102 00:08:44.533723 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:44 crc kubenswrapper[4766]: E0102 00:08:44.533793 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.591309 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.591374 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.591394 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.591418 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.591437 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:44Z","lastTransitionTime":"2026-01-02T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.694613 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.694694 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.694718 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.694746 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.694768 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:44Z","lastTransitionTime":"2026-01-02T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.797431 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.797489 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.797504 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.797528 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.797545 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:44Z","lastTransitionTime":"2026-01-02T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.900703 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.900750 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.900767 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.900789 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:44 crc kubenswrapper[4766]: I0102 00:08:44.900805 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:44Z","lastTransitionTime":"2026-01-02T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.003889 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.003959 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.003981 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.004016 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.004040 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:45Z","lastTransitionTime":"2026-01-02T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.106902 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.106990 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.107016 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.107046 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.107068 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:45Z","lastTransitionTime":"2026-01-02T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.210445 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.210524 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.210548 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.210579 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.210609 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:45Z","lastTransitionTime":"2026-01-02T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.313673 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.313744 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.313761 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.313789 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.313805 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:45Z","lastTransitionTime":"2026-01-02T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.416921 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.416985 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.417003 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.417031 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.417050 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:45Z","lastTransitionTime":"2026-01-02T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.521337 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.521449 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.521479 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.521510 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.521533 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:45Z","lastTransitionTime":"2026-01-02T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.592359 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-mdmsr" podStartSLOduration=80.592332203 podStartE2EDuration="1m20.592332203s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:45.579323328 +0000 UTC m=+100.377810542" watchObservedRunningTime="2026-01-02 00:08:45.592332203 +0000 UTC m=+100.390819397" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.623897 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=27.623610572 podStartE2EDuration="27.623610572s" podCreationTimestamp="2026-01-02 00:08:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:45.592644003 +0000 UTC m=+100.391131197" watchObservedRunningTime="2026-01-02 00:08:45.623610572 +0000 UTC m=+100.422097746" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.624951 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.625005 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.625021 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.625083 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.625102 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:45Z","lastTransitionTime":"2026-01-02T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.626646 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=51.626604096 podStartE2EDuration="51.626604096s" podCreationTimestamp="2026-01-02 00:07:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:45.622512991 +0000 UTC m=+100.421000185" watchObservedRunningTime="2026-01-02 00:08:45.626604096 +0000 UTC m=+100.425091270" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.693834 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-lmc2p" podStartSLOduration=80.693814032 podStartE2EDuration="1m20.693814032s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:45.678695768 +0000 UTC m=+100.477182972" watchObservedRunningTime="2026-01-02 00:08:45.693814032 +0000 UTC m=+100.492301196" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.694017 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-95799" podStartSLOduration=79.694010388 podStartE2EDuration="1m19.694010388s" podCreationTimestamp="2026-01-02 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:45.694011428 +0000 UTC m=+100.492498632" watchObservedRunningTime="2026-01-02 00:08:45.694010388 +0000 UTC m=+100.492497552" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.727192 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.727240 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.727256 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.727280 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.727297 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:45Z","lastTransitionTime":"2026-01-02T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.758245 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=81.75822886 podStartE2EDuration="1m21.75822886s" podCreationTimestamp="2026-01-02 00:07:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:45.731275524 +0000 UTC m=+100.529762698" watchObservedRunningTime="2026-01-02 00:08:45.75822886 +0000 UTC m=+100.556716014" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.758639 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=19.758635021 podStartE2EDuration="19.758635021s" podCreationTimestamp="2026-01-02 00:08:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:45.757608522 +0000 UTC m=+100.556095696" watchObservedRunningTime="2026-01-02 00:08:45.758635021 +0000 UTC m=+100.557122175" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.830034 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.830074 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.830087 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.830106 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.830121 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:45Z","lastTransitionTime":"2026-01-02T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.837251 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-s78r2" podStartSLOduration=80.837230197 podStartE2EDuration="1m20.837230197s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:45.802713299 +0000 UTC m=+100.601200473" watchObservedRunningTime="2026-01-02 00:08:45.837230197 +0000 UTC m=+100.635717371" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.850934 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=78.850911842 podStartE2EDuration="1m18.850911842s" podCreationTimestamp="2026-01-02 00:07:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:45.849984765 +0000 UTC m=+100.648471959" watchObservedRunningTime="2026-01-02 00:08:45.850911842 +0000 UTC m=+100.649399016" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.892295 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podStartSLOduration=80.892270193 podStartE2EDuration="1m20.892270193s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:45.891739138 +0000 UTC m=+100.690226292" watchObservedRunningTime="2026-01-02 00:08:45.892270193 +0000 UTC m=+100.690757357" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.904985 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-56zlf" podStartSLOduration=80.904955259 podStartE2EDuration="1m20.904955259s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:45.903155818 +0000 UTC m=+100.701642972" watchObservedRunningTime="2026-01-02 00:08:45.904955259 +0000 UTC m=+100.703442453" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.933130 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.933178 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.933195 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.933218 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:45 crc kubenswrapper[4766]: I0102 00:08:45.933236 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:45Z","lastTransitionTime":"2026-01-02T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.036279 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.036334 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.036352 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.036376 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.036394 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:46Z","lastTransitionTime":"2026-01-02T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.138635 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.138703 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.138719 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.138739 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.138756 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:46Z","lastTransitionTime":"2026-01-02T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.241390 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.241425 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.241435 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.241452 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.241461 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:46Z","lastTransitionTime":"2026-01-02T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.345097 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.345190 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.345211 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.345236 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.345253 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:46Z","lastTransitionTime":"2026-01-02T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.448716 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.448819 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.448834 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.448887 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.448913 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:46Z","lastTransitionTime":"2026-01-02T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.534147 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.534185 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:46 crc kubenswrapper[4766]: E0102 00:08:46.534327 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.534357 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.534148 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:46 crc kubenswrapper[4766]: E0102 00:08:46.534454 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:46 crc kubenswrapper[4766]: E0102 00:08:46.534575 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:46 crc kubenswrapper[4766]: E0102 00:08:46.534711 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.551669 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.551721 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.551738 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.551759 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.551772 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:46Z","lastTransitionTime":"2026-01-02T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.654987 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.655040 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.655056 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.655084 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.655101 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:46Z","lastTransitionTime":"2026-01-02T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.757800 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.757862 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.757879 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.757897 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.757907 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:46Z","lastTransitionTime":"2026-01-02T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.860532 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.860567 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.860575 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.860587 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.860595 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:46Z","lastTransitionTime":"2026-01-02T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.963078 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.963143 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.963165 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.963188 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:46 crc kubenswrapper[4766]: I0102 00:08:46.963204 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:46Z","lastTransitionTime":"2026-01-02T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.066305 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.066422 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.066448 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.066479 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.066500 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:47Z","lastTransitionTime":"2026-01-02T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.170057 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.170147 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.170213 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.170247 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.170274 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:47Z","lastTransitionTime":"2026-01-02T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.273455 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.273517 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.273540 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.273569 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.273589 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:47Z","lastTransitionTime":"2026-01-02T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.376929 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.376993 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.377018 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.377050 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.377074 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:47Z","lastTransitionTime":"2026-01-02T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.479631 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.479706 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.479725 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.479750 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.479767 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:47Z","lastTransitionTime":"2026-01-02T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.582672 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.582746 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.582768 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.582797 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.582818 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:47Z","lastTransitionTime":"2026-01-02T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.686404 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.686474 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.686500 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.686530 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.686555 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:47Z","lastTransitionTime":"2026-01-02T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.790622 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.790708 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.790733 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.790763 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.790787 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:47Z","lastTransitionTime":"2026-01-02T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.894135 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.894184 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.894202 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.894223 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.894239 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:47Z","lastTransitionTime":"2026-01-02T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.997286 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.997366 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.997390 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.997420 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:47 crc kubenswrapper[4766]: I0102 00:08:47.997445 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:47Z","lastTransitionTime":"2026-01-02T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.100918 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.101001 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.101034 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.101063 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.101085 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:48Z","lastTransitionTime":"2026-01-02T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.204352 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.204422 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.204442 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.204471 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.204496 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:48Z","lastTransitionTime":"2026-01-02T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.307332 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.307396 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.307415 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.307440 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.307457 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:48Z","lastTransitionTime":"2026-01-02T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.410898 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.410961 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.410985 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.411012 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.411035 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:48Z","lastTransitionTime":"2026-01-02T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.513611 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.513649 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.513671 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.513697 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.513713 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:48Z","lastTransitionTime":"2026-01-02T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.533613 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:48 crc kubenswrapper[4766]: E0102 00:08:48.533726 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.533902 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:48 crc kubenswrapper[4766]: E0102 00:08:48.533973 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.534117 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.534155 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:48 crc kubenswrapper[4766]: E0102 00:08:48.534440 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:48 crc kubenswrapper[4766]: E0102 00:08:48.534578 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.616604 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.616675 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.616694 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.616718 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.616735 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:48Z","lastTransitionTime":"2026-01-02T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.719977 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.720057 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.720074 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.720503 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.720553 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:48Z","lastTransitionTime":"2026-01-02T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.823242 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.823321 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.823338 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.823361 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.823380 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:48Z","lastTransitionTime":"2026-01-02T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.925765 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.925837 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.925894 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.925920 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:48 crc kubenswrapper[4766]: I0102 00:08:48.925937 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:48Z","lastTransitionTime":"2026-01-02T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.029361 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.029427 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.029460 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.029490 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.029507 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:49Z","lastTransitionTime":"2026-01-02T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.133123 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.133184 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.133204 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.133228 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.133245 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:49Z","lastTransitionTime":"2026-01-02T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.236473 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.236559 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.236585 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.236613 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.236632 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:49Z","lastTransitionTime":"2026-01-02T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.339190 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.339241 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.339256 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.339275 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.339287 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:49Z","lastTransitionTime":"2026-01-02T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.441913 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.441973 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.441991 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.442014 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.442032 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:49Z","lastTransitionTime":"2026-01-02T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.544823 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.544904 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.544927 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.544961 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.544982 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:49Z","lastTransitionTime":"2026-01-02T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.648377 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.648469 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.648487 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.648510 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.648526 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:49Z","lastTransitionTime":"2026-01-02T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.751890 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.751975 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.751992 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.752014 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.752054 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:49Z","lastTransitionTime":"2026-01-02T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.855035 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.855113 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.855133 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.855158 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.855175 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:49Z","lastTransitionTime":"2026-01-02T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.957989 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.958044 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.958063 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.958083 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:49 crc kubenswrapper[4766]: I0102 00:08:49.958100 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:49Z","lastTransitionTime":"2026-01-02T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.060024 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.060064 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.060072 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.060086 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.060095 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:50Z","lastTransitionTime":"2026-01-02T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.163158 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.163271 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.163308 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.163340 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.163362 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:50Z","lastTransitionTime":"2026-01-02T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.266518 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.266594 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.266619 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.266649 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.266673 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:50Z","lastTransitionTime":"2026-01-02T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.370029 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.370232 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.370267 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.370291 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.370342 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:50Z","lastTransitionTime":"2026-01-02T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.474189 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.474258 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.474282 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.474311 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.474332 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:50Z","lastTransitionTime":"2026-01-02T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.534005 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.534227 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.534368 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:50 crc kubenswrapper[4766]: E0102 00:08:50.534428 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.534449 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:50 crc kubenswrapper[4766]: E0102 00:08:50.534621 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:50 crc kubenswrapper[4766]: E0102 00:08:50.534814 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:50 crc kubenswrapper[4766]: E0102 00:08:50.534989 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.578930 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.579015 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.579043 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.579076 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.579105 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:50Z","lastTransitionTime":"2026-01-02T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.683080 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.683161 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.683182 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.683212 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.683235 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:50Z","lastTransitionTime":"2026-01-02T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.786769 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.786831 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.786903 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.786956 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.786977 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:50Z","lastTransitionTime":"2026-01-02T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.889350 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.889417 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.889439 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.889468 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.889489 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:50Z","lastTransitionTime":"2026-01-02T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.991983 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.992127 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.992148 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.992213 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:50 crc kubenswrapper[4766]: I0102 00:08:50.992247 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:50Z","lastTransitionTime":"2026-01-02T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.095765 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.095884 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.095915 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.095952 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.095979 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:51Z","lastTransitionTime":"2026-01-02T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.199134 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.199202 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.199222 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.199248 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.199265 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:51Z","lastTransitionTime":"2026-01-02T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.302519 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.302622 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.302643 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.302666 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.302685 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:51Z","lastTransitionTime":"2026-01-02T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.406397 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.406450 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.406467 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.406491 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.406509 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:51Z","lastTransitionTime":"2026-01-02T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.425213 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.425278 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.425298 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.425326 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.425382 4766 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-02T00:08:51Z","lastTransitionTime":"2026-01-02T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.493779 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh"] Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.494494 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.498173 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.498259 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.503484 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.503656 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.630935 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a3d08cff-6706-493f-9eea-47d05cfb45a2-service-ca\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.631158 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a3d08cff-6706-493f-9eea-47d05cfb45a2-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.631223 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a3d08cff-6706-493f-9eea-47d05cfb45a2-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.631279 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a3d08cff-6706-493f-9eea-47d05cfb45a2-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.631382 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a3d08cff-6706-493f-9eea-47d05cfb45a2-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.732923 4766 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a3d08cff-6706-493f-9eea-47d05cfb45a2-service-ca\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.732996 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a3d08cff-6706-493f-9eea-47d05cfb45a2-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.733027 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a3d08cff-6706-493f-9eea-47d05cfb45a2-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.733060 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a3d08cff-6706-493f-9eea-47d05cfb45a2-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.733098 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a3d08cff-6706-493f-9eea-47d05cfb45a2-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.733179 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a3d08cff-6706-493f-9eea-47d05cfb45a2-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.733298 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a3d08cff-6706-493f-9eea-47d05cfb45a2-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.734951 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a3d08cff-6706-493f-9eea-47d05cfb45a2-service-ca\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.742812 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a3d08cff-6706-493f-9eea-47d05cfb45a2-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.763568 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a3d08cff-6706-493f-9eea-47d05cfb45a2-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-84bdh\" (UID: \"a3d08cff-6706-493f-9eea-47d05cfb45a2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:51 crc kubenswrapper[4766]: I0102 00:08:51.818978 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" Jan 02 00:08:52 crc kubenswrapper[4766]: I0102 00:08:52.242425 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" event={"ID":"a3d08cff-6706-493f-9eea-47d05cfb45a2","Type":"ContainerStarted","Data":"9b7d44a339b0bbf4be23c55ecf10aab074e1ee5748422d60c3e895f003bbb144"} Jan 02 00:08:52 crc kubenswrapper[4766]: I0102 00:08:52.242924 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" event={"ID":"a3d08cff-6706-493f-9eea-47d05cfb45a2","Type":"ContainerStarted","Data":"f77f0eefe19273e4e2089fb1dfd10be9c8247d80d779464afdea8cb409c37ab1"} Jan 02 00:08:52 crc kubenswrapper[4766]: I0102 00:08:52.265365 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-84bdh" podStartSLOduration=87.265346349 podStartE2EDuration="1m27.265346349s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:08:52.264032102 +0000 UTC m=+107.062519256" watchObservedRunningTime="2026-01-02 00:08:52.265346349 +0000 UTC m=+107.063833523" Jan 02 00:08:52 crc kubenswrapper[4766]: I0102 00:08:52.534063 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:52 crc kubenswrapper[4766]: I0102 00:08:52.534115 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:52 crc kubenswrapper[4766]: E0102 00:08:52.534183 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:52 crc kubenswrapper[4766]: E0102 00:08:52.534250 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:52 crc kubenswrapper[4766]: I0102 00:08:52.534115 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:52 crc kubenswrapper[4766]: E0102 00:08:52.534344 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:52 crc kubenswrapper[4766]: I0102 00:08:52.534453 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:52 crc kubenswrapper[4766]: E0102 00:08:52.534528 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:54 crc kubenswrapper[4766]: I0102 00:08:54.533049 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:54 crc kubenswrapper[4766]: I0102 00:08:54.533113 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:54 crc kubenswrapper[4766]: I0102 00:08:54.533072 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:54 crc kubenswrapper[4766]: I0102 00:08:54.533072 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:54 crc kubenswrapper[4766]: E0102 00:08:54.533437 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:54 crc kubenswrapper[4766]: E0102 00:08:54.533495 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:54 crc kubenswrapper[4766]: E0102 00:08:54.533553 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:54 crc kubenswrapper[4766]: E0102 00:08:54.533609 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:54 crc kubenswrapper[4766]: I0102 00:08:54.535270 4766 scope.go:117] "RemoveContainer" containerID="e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58" Jan 02 00:08:54 crc kubenswrapper[4766]: E0102 00:08:54.535591 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" Jan 02 00:08:56 crc kubenswrapper[4766]: I0102 00:08:56.533274 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:56 crc kubenswrapper[4766]: I0102 00:08:56.533379 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:56 crc kubenswrapper[4766]: E0102 00:08:56.533444 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:56 crc kubenswrapper[4766]: E0102 00:08:56.533576 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:56 crc kubenswrapper[4766]: I0102 00:08:56.534698 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:56 crc kubenswrapper[4766]: I0102 00:08:56.534883 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:56 crc kubenswrapper[4766]: E0102 00:08:56.535207 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:08:56 crc kubenswrapper[4766]: E0102 00:08:56.535669 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:58 crc kubenswrapper[4766]: I0102 00:08:58.533789 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:08:58 crc kubenswrapper[4766]: I0102 00:08:58.533870 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:08:58 crc kubenswrapper[4766]: I0102 00:08:58.533999 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:08:58 crc kubenswrapper[4766]: E0102 00:08:58.533989 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:08:58 crc kubenswrapper[4766]: E0102 00:08:58.534097 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:08:58 crc kubenswrapper[4766]: I0102 00:08:58.534112 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:08:58 crc kubenswrapper[4766]: E0102 00:08:58.534179 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:08:58 crc kubenswrapper[4766]: E0102 00:08:58.534300 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:00 crc kubenswrapper[4766]: I0102 00:09:00.271487 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lmc2p_adc58583-1116-4066-9ca8-da659b367839/kube-multus/1.log" Jan 02 00:09:00 crc kubenswrapper[4766]: I0102 00:09:00.272328 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lmc2p_adc58583-1116-4066-9ca8-da659b367839/kube-multus/0.log" Jan 02 00:09:00 crc kubenswrapper[4766]: I0102 00:09:00.272415 4766 generic.go:334] "Generic (PLEG): container finished" podID="adc58583-1116-4066-9ca8-da659b367839" containerID="76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea" exitCode=1 Jan 02 00:09:00 crc kubenswrapper[4766]: I0102 00:09:00.272463 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lmc2p" event={"ID":"adc58583-1116-4066-9ca8-da659b367839","Type":"ContainerDied","Data":"76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea"} Jan 02 00:09:00 crc kubenswrapper[4766]: I0102 00:09:00.272520 4766 scope.go:117] "RemoveContainer" containerID="f94d215c1ac3f96b7ac0a5824228ec59133d9c5421048f9037ccbaee2d83f80d" Jan 02 00:09:00 crc kubenswrapper[4766]: I0102 00:09:00.273143 4766 scope.go:117] "RemoveContainer" containerID="76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea" Jan 02 00:09:00 crc kubenswrapper[4766]: E0102 00:09:00.273373 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-lmc2p_openshift-multus(adc58583-1116-4066-9ca8-da659b367839)\"" pod="openshift-multus/multus-lmc2p" podUID="adc58583-1116-4066-9ca8-da659b367839" Jan 02 00:09:00 crc kubenswrapper[4766]: I0102 00:09:00.533825 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:00 crc kubenswrapper[4766]: I0102 00:09:00.533951 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:00 crc kubenswrapper[4766]: I0102 00:09:00.533950 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:00 crc kubenswrapper[4766]: E0102 00:09:00.534007 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:00 crc kubenswrapper[4766]: I0102 00:09:00.534201 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:00 crc kubenswrapper[4766]: E0102 00:09:00.534256 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:00 crc kubenswrapper[4766]: E0102 00:09:00.534206 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:00 crc kubenswrapper[4766]: E0102 00:09:00.534329 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:01 crc kubenswrapper[4766]: I0102 00:09:01.278934 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lmc2p_adc58583-1116-4066-9ca8-da659b367839/kube-multus/1.log" Jan 02 00:09:02 crc kubenswrapper[4766]: I0102 00:09:02.533940 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:02 crc kubenswrapper[4766]: I0102 00:09:02.534019 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:02 crc kubenswrapper[4766]: I0102 00:09:02.534072 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:02 crc kubenswrapper[4766]: E0102 00:09:02.534118 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:02 crc kubenswrapper[4766]: I0102 00:09:02.533949 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:02 crc kubenswrapper[4766]: E0102 00:09:02.534275 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:02 crc kubenswrapper[4766]: E0102 00:09:02.534394 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:02 crc kubenswrapper[4766]: E0102 00:09:02.534548 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:04 crc kubenswrapper[4766]: I0102 00:09:04.533074 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:04 crc kubenswrapper[4766]: E0102 00:09:04.534136 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:04 crc kubenswrapper[4766]: I0102 00:09:04.533209 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:04 crc kubenswrapper[4766]: E0102 00:09:04.534288 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:04 crc kubenswrapper[4766]: I0102 00:09:04.533150 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:04 crc kubenswrapper[4766]: I0102 00:09:04.533250 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:04 crc kubenswrapper[4766]: E0102 00:09:04.534381 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:04 crc kubenswrapper[4766]: E0102 00:09:04.534520 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:05 crc kubenswrapper[4766]: E0102 00:09:05.538419 4766 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 02 00:09:05 crc kubenswrapper[4766]: E0102 00:09:05.670260 4766 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 02 00:09:06 crc kubenswrapper[4766]: I0102 00:09:06.533429 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:06 crc kubenswrapper[4766]: I0102 00:09:06.533474 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:06 crc kubenswrapper[4766]: E0102 00:09:06.534162 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:06 crc kubenswrapper[4766]: I0102 00:09:06.533568 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:06 crc kubenswrapper[4766]: E0102 00:09:06.534260 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:06 crc kubenswrapper[4766]: I0102 00:09:06.533532 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:06 crc kubenswrapper[4766]: E0102 00:09:06.534384 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:06 crc kubenswrapper[4766]: E0102 00:09:06.534589 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:08 crc kubenswrapper[4766]: I0102 00:09:08.533466 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:08 crc kubenswrapper[4766]: I0102 00:09:08.533521 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:08 crc kubenswrapper[4766]: I0102 00:09:08.533587 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:08 crc kubenswrapper[4766]: I0102 00:09:08.533946 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:08 crc kubenswrapper[4766]: E0102 00:09:08.533929 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:08 crc kubenswrapper[4766]: E0102 00:09:08.534088 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:08 crc kubenswrapper[4766]: E0102 00:09:08.534251 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:08 crc kubenswrapper[4766]: I0102 00:09:08.535437 4766 scope.go:117] "RemoveContainer" containerID="e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58" Jan 02 00:09:08 crc kubenswrapper[4766]: E0102 00:09:08.535793 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xps4h_openshift-ovn-kubernetes(66b7fd44-a7ca-46a0-a585-cb37baccc49e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" Jan 02 00:09:08 crc kubenswrapper[4766]: E0102 00:09:08.536104 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:10 crc kubenswrapper[4766]: I0102 00:09:10.533824 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:10 crc kubenswrapper[4766]: I0102 00:09:10.533828 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:10 crc kubenswrapper[4766]: I0102 00:09:10.533939 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:10 crc kubenswrapper[4766]: I0102 00:09:10.533939 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:10 crc kubenswrapper[4766]: E0102 00:09:10.535523 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:10 crc kubenswrapper[4766]: E0102 00:09:10.535641 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:10 crc kubenswrapper[4766]: E0102 00:09:10.535722 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:10 crc kubenswrapper[4766]: E0102 00:09:10.535835 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:10 crc kubenswrapper[4766]: E0102 00:09:10.672003 4766 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 02 00:09:11 crc kubenswrapper[4766]: I0102 00:09:11.534436 4766 scope.go:117] "RemoveContainer" containerID="76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea" Jan 02 00:09:12 crc kubenswrapper[4766]: I0102 00:09:12.325645 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lmc2p_adc58583-1116-4066-9ca8-da659b367839/kube-multus/1.log" Jan 02 00:09:12 crc kubenswrapper[4766]: I0102 00:09:12.325700 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lmc2p" event={"ID":"adc58583-1116-4066-9ca8-da659b367839","Type":"ContainerStarted","Data":"e096c1e05db993415c68cbf0f3312bb208f649a8216f555dbcb6cb286169308d"} Jan 02 00:09:12 crc kubenswrapper[4766]: I0102 00:09:12.533338 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:12 crc kubenswrapper[4766]: I0102 00:09:12.533470 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:12 crc kubenswrapper[4766]: E0102 00:09:12.533575 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:12 crc kubenswrapper[4766]: I0102 00:09:12.533613 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:12 crc kubenswrapper[4766]: I0102 00:09:12.533657 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:12 crc kubenswrapper[4766]: E0102 00:09:12.533880 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:12 crc kubenswrapper[4766]: E0102 00:09:12.534047 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:12 crc kubenswrapper[4766]: E0102 00:09:12.534104 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:14 crc kubenswrapper[4766]: I0102 00:09:14.533290 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:14 crc kubenswrapper[4766]: I0102 00:09:14.533341 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:14 crc kubenswrapper[4766]: I0102 00:09:14.533341 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:14 crc kubenswrapper[4766]: E0102 00:09:14.533537 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:14 crc kubenswrapper[4766]: I0102 00:09:14.533580 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:14 crc kubenswrapper[4766]: E0102 00:09:14.533741 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:14 crc kubenswrapper[4766]: E0102 00:09:14.533897 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:14 crc kubenswrapper[4766]: E0102 00:09:14.534033 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:15 crc kubenswrapper[4766]: E0102 00:09:15.674052 4766 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 02 00:09:16 crc kubenswrapper[4766]: I0102 00:09:16.533246 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:16 crc kubenswrapper[4766]: I0102 00:09:16.533291 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:16 crc kubenswrapper[4766]: I0102 00:09:16.533565 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:16 crc kubenswrapper[4766]: E0102 00:09:16.533622 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:16 crc kubenswrapper[4766]: I0102 00:09:16.533664 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:16 crc kubenswrapper[4766]: E0102 00:09:16.533795 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:16 crc kubenswrapper[4766]: E0102 00:09:16.533955 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:16 crc kubenswrapper[4766]: E0102 00:09:16.534115 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:18 crc kubenswrapper[4766]: I0102 00:09:18.533038 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:18 crc kubenswrapper[4766]: E0102 00:09:18.533465 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:18 crc kubenswrapper[4766]: I0102 00:09:18.533081 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:18 crc kubenswrapper[4766]: I0102 00:09:18.533081 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:18 crc kubenswrapper[4766]: E0102 00:09:18.533553 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:18 crc kubenswrapper[4766]: I0102 00:09:18.533102 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:18 crc kubenswrapper[4766]: E0102 00:09:18.533706 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:18 crc kubenswrapper[4766]: E0102 00:09:18.534031 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:20 crc kubenswrapper[4766]: I0102 00:09:20.533751 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:20 crc kubenswrapper[4766]: I0102 00:09:20.533751 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:20 crc kubenswrapper[4766]: E0102 00:09:20.533965 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:20 crc kubenswrapper[4766]: I0102 00:09:20.534018 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:20 crc kubenswrapper[4766]: I0102 00:09:20.534143 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:20 crc kubenswrapper[4766]: E0102 00:09:20.534157 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:20 crc kubenswrapper[4766]: E0102 00:09:20.534201 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:20 crc kubenswrapper[4766]: E0102 00:09:20.534272 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:20 crc kubenswrapper[4766]: E0102 00:09:20.675936 4766 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 02 00:09:22 crc kubenswrapper[4766]: I0102 00:09:22.533951 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:22 crc kubenswrapper[4766]: I0102 00:09:22.533951 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:22 crc kubenswrapper[4766]: I0102 00:09:22.534998 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:22 crc kubenswrapper[4766]: I0102 00:09:22.535220 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:22 crc kubenswrapper[4766]: E0102 00:09:22.535229 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:22 crc kubenswrapper[4766]: E0102 00:09:22.535337 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:22 crc kubenswrapper[4766]: E0102 00:09:22.535416 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:22 crc kubenswrapper[4766]: E0102 00:09:22.535482 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:23 crc kubenswrapper[4766]: I0102 00:09:23.534607 4766 scope.go:117] "RemoveContainer" containerID="e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58" Jan 02 00:09:24 crc kubenswrapper[4766]: I0102 00:09:24.368101 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/3.log" Jan 02 00:09:24 crc kubenswrapper[4766]: I0102 00:09:24.372436 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerStarted","Data":"65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486"} Jan 02 00:09:24 crc kubenswrapper[4766]: I0102 00:09:24.372967 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:09:24 crc kubenswrapper[4766]: I0102 00:09:24.410106 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podStartSLOduration=119.410080715 podStartE2EDuration="1m59.410080715s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:24.407603444 +0000 UTC m=+139.206090628" watchObservedRunningTime="2026-01-02 00:09:24.410080715 +0000 UTC m=+139.208567889" Jan 02 00:09:24 crc kubenswrapper[4766]: I0102 00:09:24.442385 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zd2t8"] Jan 02 00:09:24 crc kubenswrapper[4766]: I0102 00:09:24.442514 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:24 crc kubenswrapper[4766]: E0102 00:09:24.442645 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:24 crc kubenswrapper[4766]: I0102 00:09:24.533694 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:24 crc kubenswrapper[4766]: I0102 00:09:24.533753 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:24 crc kubenswrapper[4766]: I0102 00:09:24.533758 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:24 crc kubenswrapper[4766]: E0102 00:09:24.533820 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:24 crc kubenswrapper[4766]: E0102 00:09:24.533982 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:24 crc kubenswrapper[4766]: E0102 00:09:24.534046 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:25 crc kubenswrapper[4766]: E0102 00:09:25.677460 4766 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 02 00:09:26 crc kubenswrapper[4766]: I0102 00:09:26.533765 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:26 crc kubenswrapper[4766]: E0102 00:09:26.533992 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:26 crc kubenswrapper[4766]: I0102 00:09:26.534045 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:26 crc kubenswrapper[4766]: I0102 00:09:26.534077 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:26 crc kubenswrapper[4766]: I0102 00:09:26.534178 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:26 crc kubenswrapper[4766]: E0102 00:09:26.534304 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:26 crc kubenswrapper[4766]: E0102 00:09:26.534416 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:26 crc kubenswrapper[4766]: E0102 00:09:26.534626 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:28 crc kubenswrapper[4766]: I0102 00:09:28.533959 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:28 crc kubenswrapper[4766]: I0102 00:09:28.533986 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:28 crc kubenswrapper[4766]: I0102 00:09:28.533988 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:28 crc kubenswrapper[4766]: I0102 00:09:28.533960 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:28 crc kubenswrapper[4766]: E0102 00:09:28.534086 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:28 crc kubenswrapper[4766]: E0102 00:09:28.534261 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:28 crc kubenswrapper[4766]: E0102 00:09:28.534386 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:28 crc kubenswrapper[4766]: E0102 00:09:28.534497 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:30 crc kubenswrapper[4766]: I0102 00:09:30.533337 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:30 crc kubenswrapper[4766]: I0102 00:09:30.533355 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:30 crc kubenswrapper[4766]: I0102 00:09:30.533427 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:30 crc kubenswrapper[4766]: E0102 00:09:30.534037 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zd2t8" podUID="f869b527-abf2-400f-90c2-ebee595564a2" Jan 02 00:09:30 crc kubenswrapper[4766]: E0102 00:09:30.533830 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:09:30 crc kubenswrapper[4766]: E0102 00:09:30.534153 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 02 00:09:30 crc kubenswrapper[4766]: I0102 00:09:30.533423 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:30 crc kubenswrapper[4766]: E0102 00:09:30.534301 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.859313 4766 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.908181 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.909057 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.909201 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cnd4q"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.911759 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.912046 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.915298 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.915460 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.915744 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wvh2t"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.916207 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-j4kpl"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.916539 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.916980 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.917086 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.919312 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.920777 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.920885 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.921399 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.922818 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.923620 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.924533 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.927037 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.927246 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.932226 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.932422 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.932593 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.932753 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.932938 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.935321 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.936004 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.936546 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.938516 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.938727 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.939430 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.939615 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.948869 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.949126 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.949167 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.949258 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.949422 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.949484 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.949631 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.949741 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.949801 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.949885 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.949908 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.949818 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.950058 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.950127 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.950144 4766 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.950231 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.950324 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.952925 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-8bc2w"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.953618 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-8bc2w" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.953686 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.953831 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.953971 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.954284 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.954317 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.957099 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.962418 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.963830 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.964738 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.967728 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.968033 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s5ggw"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.976050 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.976480 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.976724 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.976909 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.977009 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.977115 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.977215 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.978175 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.978324 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.978427 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.978618 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.979124 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cps54"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.979474 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.979775 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-pthnj"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.980203 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-pruner-29455200-8qg7p"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.980515 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cps54" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.980565 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29455200-8qg7p" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.981113 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-czkht"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.982040 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.982074 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.982132 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.983953 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.987976 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-482gx"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.988328 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.988621 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-22rvv"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.988961 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krqht"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.989274 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q77bn"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.989658 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.990380 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.991304 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.991759 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.992045 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.992276 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.993345 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f"] Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.993855 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" Jan 02 00:09:31 crc kubenswrapper[4766]: I0102 00:09:31.997026 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.001139 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.001606 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.001765 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.001929 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.001997 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002024 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002177 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"pruner-dockercfg-p7bcw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002229 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"serviceca" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002275 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002384 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002419 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002525 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002543 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002181 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002661 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002837 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002903 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002992 4766 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.003071 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.002675 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.003251 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.003356 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.005610 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.006181 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/07522b89-e97c-4363-a223-e57f11fa7ec5-encryption-config\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.006237 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvf7z\" (UniqueName: \"kubernetes.io/projected/07522b89-e97c-4363-a223-e57f11fa7ec5-kube-api-access-mvf7z\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.006263 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/07522b89-e97c-4363-a223-e57f11fa7ec5-etcd-client\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.006289 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/07522b89-e97c-4363-a223-e57f11fa7ec5-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.006305 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/07522b89-e97c-4363-a223-e57f11fa7ec5-audit-dir\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.006323 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07522b89-e97c-4363-a223-e57f11fa7ec5-serving-cert\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc 
kubenswrapper[4766]: I0102 00:09:32.006336 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/07522b89-e97c-4363-a223-e57f11fa7ec5-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.006364 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/07522b89-e97c-4363-a223-e57f11fa7ec5-audit-policies\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.006819 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.011107 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.011179 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.011456 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.016135 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.017115 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.017440 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.017947 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.018021 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.018155 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.018379 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.018603 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.019808 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.020071 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.020251 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.020984 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.021352 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.021731 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.021829 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.021921 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.022159 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.022537 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.023662 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.023749 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.055149 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.055323 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.055883 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.057194 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.057219 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.057791 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.058204 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.058311 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.058223 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.059725 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.062467 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.062524 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.064403 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.082159 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.085496 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.086362 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.088916 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.089626 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qlzhc"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.090129 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.090435 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.098826 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.099854 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.100457 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.101482 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.102459 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.106827 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fpfk\" (UniqueName: \"kubernetes.io/projected/89725c43-8074-47e4-9464-52dac202626c-kube-api-access-9fpfk\") pod \"image-pruner-29455200-8qg7p\" (UID: \"89725c43-8074-47e4-9464-52dac202626c\") " pod="openshift-image-registry/image-pruner-29455200-8qg7p" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.106875 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d3116b8-d190-455f-b0a8-f44f7818b034-config\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.106894 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/35c9a208-651e-4453-975f-2d069276a9aa-audit-dir\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.106910 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a59b5466-b9dd-45f1-be0b-18b7afa27942-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.106952 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.106970 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-service-ca\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.106985 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7zj5\" (UniqueName: \"kubernetes.io/projected/100eb09d-e755-4cd9-b31f-e2b6920d91f0-kube-api-access-m7zj5\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107025 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-metrics-certs\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107039 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3d3116b8-d190-455f-b0a8-f44f7818b034-auth-proxy-config\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107054 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trh8p\" (UniqueName: \"kubernetes.io/projected/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-kube-api-access-trh8p\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107071 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhmdd\" (UniqueName: \"kubernetes.io/projected/88e0ee0c-3493-45ce-9dcc-ab7d4958b82a-kube-api-access-lhmdd\") pod \"openshift-apiserver-operator-796bbdcf4f-c7s4x\" (UID: \"88e0ee0c-3493-45ce-9dcc-ab7d4958b82a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107103 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107120 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-default-certificate\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107134 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a59b5466-b9dd-45f1-be0b-18b7afa27942-config\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107149 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfl46\" (UniqueName: \"kubernetes.io/projected/6e809e90-d3c7-41ea-b346-103b603acc41-kube-api-access-xfl46\") pod \"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107185 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-stats-auth\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107199 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-trusted-ca-bundle\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107213 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107230 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb03fd80-3959-4cea-9739-3580d6310a64-console-serving-cert\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107259 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bb03fd80-3959-4cea-9739-3580d6310a64-console-oauth-config\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107274 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7pns\" (UniqueName: \"kubernetes.io/projected/a59b5466-b9dd-45f1-be0b-18b7afa27942-kube-api-access-c7pns\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107291 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6e809e90-d3c7-41ea-b346-103b603acc41-serving-cert\") pod \"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107307 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-serving-cert\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107338 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/100eb09d-e755-4cd9-b31f-e2b6920d91f0-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107353 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68jhz\" (UniqueName: \"kubernetes.io/projected/35c9a208-651e-4453-975f-2d069276a9aa-kube-api-access-68jhz\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107370 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-etcd-client\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107384 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-client-ca\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107415 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88e0ee0c-3493-45ce-9dcc-ab7d4958b82a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-c7s4x\" (UID: \"88e0ee0c-3493-45ce-9dcc-ab7d4958b82a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107432 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f64cs\" (UniqueName: \"kubernetes.io/projected/afef674f-42ec-4083-ba3c-803546393a46-kube-api-access-f64cs\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107449 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/1f019efc-00d2-4d74-bebf-70782e52182b-trusted-ca\") pod \"console-operator-58897d9998-s5ggw\" (UID: \"1f019efc-00d2-4d74-bebf-70782e52182b\") " pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107464 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-config\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107504 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afef674f-42ec-4083-ba3c-803546393a46-serving-cert\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107526 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107597 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-etcd-serving-ca\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107736 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88e0ee0c-3493-45ce-9dcc-ab7d4958b82a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-c7s4x\" (UID: \"88e0ee0c-3493-45ce-9dcc-ab7d4958b82a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107780 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-config\") pod \"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107815 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/100eb09d-e755-4cd9-b31f-e2b6920d91f0-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.107869 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvf7z\" (UniqueName: 
\"kubernetes.io/projected/07522b89-e97c-4363-a223-e57f11fa7ec5-kube-api-access-mvf7z\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108039 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-image-import-ca\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108070 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-audit-dir\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108090 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-oauth-serving-cert\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108115 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f019efc-00d2-4d74-bebf-70782e52182b-config\") pod \"console-operator-58897d9998-s5ggw\" (UID: \"1f019efc-00d2-4d74-bebf-70782e52182b\") " pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108138 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108167 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-node-pullsecrets\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108187 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz68p\" (UniqueName: \"kubernetes.io/projected/509add4d-67be-4dff-8194-55516bc7bbd7-kube-api-access-rz68p\") pod \"downloads-7954f5f757-8bc2w\" (UID: \"509add4d-67be-4dff-8194-55516bc7bbd7\") " pod="openshift-console/downloads-7954f5f757-8bc2w" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108205 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f019efc-00d2-4d74-bebf-70782e52182b-serving-cert\") pod \"console-operator-58897d9998-s5ggw\" (UID: \"1f019efc-00d2-4d74-bebf-70782e52182b\") " 
pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108230 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108289 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/07522b89-e97c-4363-a223-e57f11fa7ec5-etcd-client\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108867 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/07522b89-e97c-4363-a223-e57f11fa7ec5-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108920 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/07522b89-e97c-4363-a223-e57f11fa7ec5-audit-dir\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108958 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hk88g\" (UniqueName: \"kubernetes.io/projected/bb03fd80-3959-4cea-9739-3580d6310a64-kube-api-access-hk88g\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.108994 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4ff7\" (UniqueName: \"kubernetes.io/projected/1f019efc-00d2-4d74-bebf-70782e52182b-kube-api-access-z4ff7\") pod \"console-operator-58897d9998-s5ggw\" (UID: \"1f019efc-00d2-4d74-bebf-70782e52182b\") " pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109040 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-client-ca\") pod \"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109070 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 
00:09:32.109108 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9b5m\" (UniqueName: \"kubernetes.io/projected/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-kube-api-access-p9b5m\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109122 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/89725c43-8074-47e4-9464-52dac202626c-serviceca\") pod \"image-pruner-29455200-8qg7p\" (UID: \"89725c43-8074-47e4-9464-52dac202626c\") " pod="openshift-image-registry/image-pruner-29455200-8qg7p" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109154 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r9kw\" (UniqueName: \"kubernetes.io/projected/3d3116b8-d190-455f-b0a8-f44f7818b034-kube-api-access-8r9kw\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109191 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07522b89-e97c-4363-a223-e57f11fa7ec5-serving-cert\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109207 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-encryption-config\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109262 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/3d3116b8-d190-455f-b0a8-f44f7818b034-machine-approver-tls\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109277 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-audit-policies\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109293 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109308 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109339 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109354 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a59b5466-b9dd-45f1-be0b-18b7afa27942-service-ca-bundle\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109369 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-console-config\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109386 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/07522b89-e97c-4363-a223-e57f11fa7ec5-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109417 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-config\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109431 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-audit\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109507 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/07522b89-e97c-4363-a223-e57f11fa7ec5-audit-policies\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109523 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-trusted-ca-bundle\") pod 
\"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109564 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-service-ca-bundle\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109579 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a59b5466-b9dd-45f1-be0b-18b7afa27942-serving-cert\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109594 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109612 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/07522b89-e97c-4363-a223-e57f11fa7ec5-encryption-config\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109643 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109662 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/100eb09d-e755-4cd9-b31f-e2b6920d91f0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.109912 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/07522b89-e97c-4363-a223-e57f11fa7ec5-audit-dir\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.110222 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/07522b89-e97c-4363-a223-e57f11fa7ec5-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.110601 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.111330 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/07522b89-e97c-4363-a223-e57f11fa7ec5-audit-policies\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.111432 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.111658 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-s6kgk"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.112330 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.115627 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.116220 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.117097 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/07522b89-e97c-4363-a223-e57f11fa7ec5-etcd-client\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.117916 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07522b89-e97c-4363-a223-e57f11fa7ec5-serving-cert\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.118179 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/07522b89-e97c-4363-a223-e57f11fa7ec5-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.126090 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.131198 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.131351 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.131518 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/07522b89-e97c-4363-a223-e57f11fa7ec5-encryption-config\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.131935 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.133321 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.134290 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-gsbjs"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.135092 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.135208 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.136030 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.137331 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.138125 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.138580 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wvh2t"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.140124 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-mnk85"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.141083 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-mnk85" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.142087 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.145663 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.148073 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.151721 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-j4kpl"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.162210 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.164686 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s5ggw"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.167319 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.168312 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.169364 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-8bc2w"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.170417 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.173242 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.178806 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q77bn"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.185401 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.189509 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cnd4q"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.190687 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.191661 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-czkht"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.192329 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.192623 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cps54"] Jan 02 00:09:32 
crc kubenswrapper[4766]: I0102 00:09:32.193594 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-482gx"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.195942 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-xhj52"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.196630 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-xhj52" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.197058 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-xldzf"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.197742 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.198001 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-gsbjs"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.198961 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.199944 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-xhj52"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.200953 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qlzhc"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.201916 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.202871 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.204102 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.205100 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.206118 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-pthnj"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.207149 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.208043 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.209345 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29455200-8qg7p"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.210342 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krqht"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.211339 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 02 
00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.211356 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-s6kgk"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.211868 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-metrics-certs\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.211902 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3d3116b8-d190-455f-b0a8-f44f7818b034-auth-proxy-config\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.211927 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trh8p\" (UniqueName: \"kubernetes.io/projected/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-kube-api-access-trh8p\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.211948 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhmdd\" (UniqueName: \"kubernetes.io/projected/88e0ee0c-3493-45ce-9dcc-ab7d4958b82a-kube-api-access-lhmdd\") pod \"openshift-apiserver-operator-796bbdcf4f-c7s4x\" (UID: \"88e0ee0c-3493-45ce-9dcc-ab7d4958b82a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.211969 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-default-certificate\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.211990 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212012 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a59b5466-b9dd-45f1-be0b-18b7afa27942-config\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212029 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfl46\" (UniqueName: \"kubernetes.io/projected/6e809e90-d3c7-41ea-b346-103b603acc41-kube-api-access-xfl46\") pod \"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212054 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-stats-auth\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212073 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-trusted-ca-bundle\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212092 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212112 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb03fd80-3959-4cea-9739-3580d6310a64-console-serving-cert\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212132 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bb03fd80-3959-4cea-9739-3580d6310a64-console-oauth-config\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212156 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7pns\" (UniqueName: \"kubernetes.io/projected/a59b5466-b9dd-45f1-be0b-18b7afa27942-kube-api-access-c7pns\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212176 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e809e90-d3c7-41ea-b346-103b603acc41-serving-cert\") pod \"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212198 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-serving-cert\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212217 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/100eb09d-e755-4cd9-b31f-e2b6920d91f0-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212241 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68jhz\" (UniqueName: \"kubernetes.io/projected/35c9a208-651e-4453-975f-2d069276a9aa-kube-api-access-68jhz\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212263 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-etcd-client\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212285 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-client-ca\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212308 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88e0ee0c-3493-45ce-9dcc-ab7d4958b82a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-c7s4x\" (UID: \"88e0ee0c-3493-45ce-9dcc-ab7d4958b82a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212332 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f64cs\" (UniqueName: \"kubernetes.io/projected/afef674f-42ec-4083-ba3c-803546393a46-kube-api-access-f64cs\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212355 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-config\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212378 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afef674f-42ec-4083-ba3c-803546393a46-serving-cert\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212405 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f019efc-00d2-4d74-bebf-70782e52182b-trusted-ca\") pod \"console-operator-58897d9998-s5ggw\" (UID: \"1f019efc-00d2-4d74-bebf-70782e52182b\") " 
pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212454 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212478 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-etcd-serving-ca\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212498 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88e0ee0c-3493-45ce-9dcc-ab7d4958b82a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-c7s4x\" (UID: \"88e0ee0c-3493-45ce-9dcc-ab7d4958b82a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212521 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-config\") pod \"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212541 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/100eb09d-e755-4cd9-b31f-e2b6920d91f0-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212569 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-oauth-serving-cert\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212588 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f019efc-00d2-4d74-bebf-70782e52182b-config\") pod \"console-operator-58897d9998-s5ggw\" (UID: \"1f019efc-00d2-4d74-bebf-70782e52182b\") " pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212607 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-image-import-ca\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212625 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-audit-dir\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212646 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212667 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-node-pullsecrets\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212687 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz68p\" (UniqueName: \"kubernetes.io/projected/509add4d-67be-4dff-8194-55516bc7bbd7-kube-api-access-rz68p\") pod \"downloads-7954f5f757-8bc2w\" (UID: \"509add4d-67be-4dff-8194-55516bc7bbd7\") " pod="openshift-console/downloads-7954f5f757-8bc2w" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212709 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f019efc-00d2-4d74-bebf-70782e52182b-serving-cert\") pod \"console-operator-58897d9998-s5ggw\" (UID: \"1f019efc-00d2-4d74-bebf-70782e52182b\") " pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212733 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212754 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hk88g\" (UniqueName: \"kubernetes.io/projected/bb03fd80-3959-4cea-9739-3580d6310a64-kube-api-access-hk88g\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212776 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4ff7\" (UniqueName: \"kubernetes.io/projected/1f019efc-00d2-4d74-bebf-70782e52182b-kube-api-access-z4ff7\") pod \"console-operator-58897d9998-s5ggw\" (UID: \"1f019efc-00d2-4d74-bebf-70782e52182b\") " pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212807 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-client-ca\") pod \"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212828 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212870 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/89725c43-8074-47e4-9464-52dac202626c-serviceca\") pod \"image-pruner-29455200-8qg7p\" (UID: \"89725c43-8074-47e4-9464-52dac202626c\") " pod="openshift-image-registry/image-pruner-29455200-8qg7p" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212895 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r9kw\" (UniqueName: \"kubernetes.io/projected/3d3116b8-d190-455f-b0a8-f44f7818b034-kube-api-access-8r9kw\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212923 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9b5m\" (UniqueName: \"kubernetes.io/projected/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-kube-api-access-p9b5m\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212944 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-encryption-config\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212967 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.212988 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a59b5466-b9dd-45f1-be0b-18b7afa27942-service-ca-bundle\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213010 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-console-config\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213031 4766 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/3d3116b8-d190-455f-b0a8-f44f7818b034-machine-approver-tls\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213053 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-audit-policies\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213063 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a59b5466-b9dd-45f1-be0b-18b7afa27942-config\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213075 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213098 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213121 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-config\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213142 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-audit\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213163 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-trusted-ca-bundle\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213198 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-service-ca-bundle\") pod \"router-default-5444994796-22rvv\" (UID: 
\"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213222 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a59b5466-b9dd-45f1-be0b-18b7afa27942-serving-cert\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213245 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213268 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213278 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213292 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/100eb09d-e755-4cd9-b31f-e2b6920d91f0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213367 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a59b5466-b9dd-45f1-be0b-18b7afa27942-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213400 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fpfk\" (UniqueName: \"kubernetes.io/projected/89725c43-8074-47e4-9464-52dac202626c-kube-api-access-9fpfk\") pod \"image-pruner-29455200-8qg7p\" (UID: \"89725c43-8074-47e4-9464-52dac202626c\") " pod="openshift-image-registry/image-pruner-29455200-8qg7p" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213434 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d3116b8-d190-455f-b0a8-f44f7818b034-config\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213465 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/35c9a208-651e-4453-975f-2d069276a9aa-audit-dir\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213509 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213545 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-service-ca\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213574 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7zj5\" (UniqueName: \"kubernetes.io/projected/100eb09d-e755-4cd9-b31f-e2b6920d91f0-kube-api-access-m7zj5\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213816 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-oauth-serving-cert\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.213888 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-trusted-ca-bundle\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.214066 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3d3116b8-d190-455f-b0a8-f44f7818b034-auth-proxy-config\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.214457 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xldzf"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.214505 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-audit-dir\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.214629 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f019efc-00d2-4d74-bebf-70782e52182b-config\") pod \"console-operator-58897d9998-s5ggw\" (UID: 
\"1f019efc-00d2-4d74-bebf-70782e52182b\") " pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.214782 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-node-pullsecrets\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.214966 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-client-ca\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.215072 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d3116b8-d190-455f-b0a8-f44f7818b034-config\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.215123 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/35c9a208-651e-4453-975f-2d069276a9aa-audit-dir\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.215205 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a59b5466-b9dd-45f1-be0b-18b7afa27942-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.215492 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-config\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.215597 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-client-ca\") pod \"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.215600 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-image-import-ca\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.215858 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj"] Jan 02 00:09:32 crc 
kubenswrapper[4766]: I0102 00:09:32.216301 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-console-config\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.216363 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/89725c43-8074-47e4-9464-52dac202626c-serviceca\") pod \"image-pruner-29455200-8qg7p\" (UID: \"89725c43-8074-47e4-9464-52dac202626c\") " pod="openshift-image-registry/image-pruner-29455200-8qg7p" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.216490 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-etcd-serving-ca\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.216633 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bb03fd80-3959-4cea-9739-3580d6310a64-service-ca\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.217072 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-audit\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.217083 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a59b5466-b9dd-45f1-be0b-18b7afa27942-service-ca-bundle\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.217365 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.217459 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f019efc-00d2-4d74-bebf-70782e52182b-trusted-ca\") pod \"console-operator-58897d9998-s5ggw\" (UID: \"1f019efc-00d2-4d74-bebf-70782e52182b\") " pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.217663 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-config\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 
00:09:32.217998 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.218210 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-service-ca-bundle\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.218588 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.218714 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88e0ee0c-3493-45ce-9dcc-ab7d4958b82a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-c7s4x\" (UID: \"88e0ee0c-3493-45ce-9dcc-ab7d4958b82a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.218785 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-config\") pod \"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.219004 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.219142 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/100eb09d-e755-4cd9-b31f-e2b6920d91f0-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.219559 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.219735 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.219858 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e809e90-d3c7-41ea-b346-103b603acc41-serving-cert\") pod 
\"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.219986 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.220054 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-trusted-ca-bundle\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.220318 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.220448 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a59b5466-b9dd-45f1-be0b-18b7afa27942-serving-cert\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.220900 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-etcd-client\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.220942 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.221636 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/3d3116b8-d190-455f-b0a8-f44f7818b034-machine-approver-tls\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.221711 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-serving-cert\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.221865 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb03fd80-3959-4cea-9739-3580d6310a64-console-serving-cert\") pod \"console-f9d7485db-482gx\" (UID: 
\"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.222059 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-encryption-config\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.222064 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.222176 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afef674f-42ec-4083-ba3c-803546393a46-serving-cert\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.222384 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.222752 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-stats-auth\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.222900 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88e0ee0c-3493-45ce-9dcc-ab7d4958b82a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-c7s4x\" (UID: \"88e0ee0c-3493-45ce-9dcc-ab7d4958b82a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.223310 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f019efc-00d2-4d74-bebf-70782e52182b-serving-cert\") pod \"console-operator-58897d9998-s5ggw\" (UID: \"1f019efc-00d2-4d74-bebf-70782e52182b\") " pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.223695 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.224216 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bb03fd80-3959-4cea-9739-3580d6310a64-console-oauth-config\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" 
Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.224393 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-metrics-certs\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.224563 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wjtkd"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.225586 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.226285 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.226283 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wjtkd"] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.227445 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/100eb09d-e755-4cd9-b31f-e2b6920d91f0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.229955 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.231214 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.238494 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-audit-policies\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.251250 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.257005 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-default-certificate\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.271253 4766 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.291737 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.311659 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.336692 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.346154 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.351224 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.371502 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.391303 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.398368 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.410820 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.432376 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.450988 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.471880 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.492035 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.511619 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.516892 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.517017 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.517073 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.517102 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.517128 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517202 4766 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517229 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:11:34.517184728 +0000 UTC m=+269.315671922 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517284 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:11:34.51726518 +0000 UTC m=+269.315752474 (durationBeforeRetry 2m2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517325 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517355 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517377 4766 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517441 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-02 00:11:34.517419864 +0000 UTC m=+269.315907048 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517380 4766 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517516 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-02 00:11:34.517499056 +0000 UTC m=+269.315986330 (durationBeforeRetry 2m2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517816 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517892 4766 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517909 4766 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:09:32 crc kubenswrapper[4766]: E0102 00:09:32.517967 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-02 00:11:34.517951167 +0000 UTC m=+269.316438361 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.531677 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.533044 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.533047 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.533126 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.533402 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.551565 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.573226 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.591154 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.651385 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.672295 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.692387 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.720051 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.730676 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.751046 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.771110 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.791567 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.812943 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.831749 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.851380 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.872144 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.892203 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.911997 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.932372 4766 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.952047 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.971952 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 02 00:09:32 crc kubenswrapper[4766]: I0102 00:09:32.992512 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.011758 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.032470 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.050946 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.071450 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.091531 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.110328 4766 request.go:700] Waited for 1.019664917s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/secrets?fieldSelector=metadata.name%3Dmarketplace-operator-dockercfg-5nsgg&limit=500&resourceVersion=0 Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.112502 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.140055 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.159758 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.171746 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.191836 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.211473 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.231141 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.251694 4766 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.292790 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.302730 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.312308 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.331322 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.372610 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.377624 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvf7z\" (UniqueName: \"kubernetes.io/projected/07522b89-e97c-4363-a223-e57f11fa7ec5-kube-api-access-mvf7z\") pod \"apiserver-7bbb656c7d-cv2dl\" (UID: \"07522b89-e97c-4363-a223-e57f11fa7ec5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.391625 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.411980 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.431499 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.439789 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.452111 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.472056 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.492693 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.512526 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.531666 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.554969 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.571808 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.591932 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.612117 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.631630 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.639156 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl"] Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.651831 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.672364 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.691495 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.712595 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.732088 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.751889 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.772997 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 02 00:09:33 
crc kubenswrapper[4766]: I0102 00:09:33.793088 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.812074 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.832099 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.851760 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.871891 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.891942 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.911736 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.932003 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.951744 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 02 00:09:33 crc kubenswrapper[4766]: I0102 00:09:33.992581 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhmdd\" (UniqueName: \"kubernetes.io/projected/88e0ee0c-3493-45ce-9dcc-ab7d4958b82a-kube-api-access-lhmdd\") pod \"openshift-apiserver-operator-796bbdcf4f-c7s4x\" (UID: \"88e0ee0c-3493-45ce-9dcc-ab7d4958b82a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.006969 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.015769 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfl46\" (UniqueName: \"kubernetes.io/projected/6e809e90-d3c7-41ea-b346-103b603acc41-kube-api-access-xfl46\") pod \"route-controller-manager-6576b87f9c-hqfgm\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.028538 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f64cs\" (UniqueName: \"kubernetes.io/projected/afef674f-42ec-4083-ba3c-803546393a46-kube-api-access-f64cs\") pod \"controller-manager-879f6c89f-wvh2t\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.049454 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7pns\" (UniqueName: \"kubernetes.io/projected/a59b5466-b9dd-45f1-be0b-18b7afa27942-kube-api-access-c7pns\") pod \"authentication-operator-69f744f599-pthnj\" (UID: \"a59b5466-b9dd-45f1-be0b-18b7afa27942\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.071535 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68jhz\" (UniqueName: \"kubernetes.io/projected/35c9a208-651e-4453-975f-2d069276a9aa-kube-api-access-68jhz\") pod \"oauth-openshift-558db77b4-czkht\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.084949 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.094219 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7zj5\" (UniqueName: \"kubernetes.io/projected/100eb09d-e755-4cd9-b31f-e2b6920d91f0-kube-api-access-m7zj5\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.115317 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/100eb09d-e755-4cd9-b31f-e2b6920d91f0-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nxmh7\" (UID: \"100eb09d-e755-4cd9-b31f-e2b6920d91f0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.127500 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hk88g\" (UniqueName: \"kubernetes.io/projected/bb03fd80-3959-4cea-9739-3580d6310a64-kube-api-access-hk88g\") pod \"console-f9d7485db-482gx\" (UID: \"bb03fd80-3959-4cea-9739-3580d6310a64\") " pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.129953 4766 request.go:700] Waited for 1.915323708s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/serviceaccounts/pruner/token Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.149875 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fpfk\" (UniqueName: \"kubernetes.io/projected/89725c43-8074-47e4-9464-52dac202626c-kube-api-access-9fpfk\") pod \"image-pruner-29455200-8qg7p\" (UID: \"89725c43-8074-47e4-9464-52dac202626c\") " pod="openshift-image-registry/image-pruner-29455200-8qg7p" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.158315 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.169223 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4ff7\" (UniqueName: \"kubernetes.io/projected/1f019efc-00d2-4d74-bebf-70782e52182b-kube-api-access-z4ff7\") pod \"console-operator-58897d9998-s5ggw\" (UID: \"1f019efc-00d2-4d74-bebf-70782e52182b\") " pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.188814 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz68p\" (UniqueName: \"kubernetes.io/projected/509add4d-67be-4dff-8194-55516bc7bbd7-kube-api-access-rz68p\") pod \"downloads-7954f5f757-8bc2w\" (UID: \"509add4d-67be-4dff-8194-55516bc7bbd7\") " pod="openshift-console/downloads-7954f5f757-8bc2w" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.210120 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r9kw\" (UniqueName: \"kubernetes.io/projected/3d3116b8-d190-455f-b0a8-f44f7818b034-kube-api-access-8r9kw\") pod \"machine-approver-56656f9798-cj6b2\" (UID: \"3d3116b8-d190-455f-b0a8-f44f7818b034\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.218461 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x"] Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.229730 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9b5m\" (UniqueName: \"kubernetes.io/projected/73d33fe2-474d-4bb3-8e6f-d80ea22650fd-kube-api-access-p9b5m\") pod \"router-default-5444994796-22rvv\" (UID: \"73d33fe2-474d-4bb3-8e6f-d80ea22650fd\") " pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.236426 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-8bc2w" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.246136 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trh8p\" (UniqueName: \"kubernetes.io/projected/f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361-kube-api-access-trh8p\") pod \"apiserver-76f77b778f-cnd4q\" (UID: \"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361\") " pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.250292 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.250829 4766 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.255907 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.276167 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.276686 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29455200-8qg7p" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.290079 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.293634 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.295278 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm"] Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.300038 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:34 crc kubenswrapper[4766]: W0102 00:09:34.311117 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e809e90_d3c7_41ea_b346_103b603acc41.slice/crio-954d9bb56031721f04f901cb748bb91be3905e6f9ab9cc9dc2672869769960cf WatchSource:0}: Error finding container 954d9bb56031721f04f901cb748bb91be3905e6f9ab9cc9dc2672869769960cf: Status 404 returned error can't find the container with id 954d9bb56031721f04f901cb748bb91be3905e6f9ab9cc9dc2672869769960cf Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.313773 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.316441 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.332975 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 02 00:09:34 crc kubenswrapper[4766]: W0102 00:09:34.337184 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73d33fe2_474d_4bb3_8e6f_d80ea22650fd.slice/crio-603879f830ace87d827eb99d050020e75c130bc744ee1e908dba0f6354ac6093 WatchSource:0}: Error finding container 603879f830ace87d827eb99d050020e75c130bc744ee1e908dba0f6354ac6093: Status 404 returned error can't find the container with id 603879f830ace87d827eb99d050020e75c130bc744ee1e908dba0f6354ac6093 Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.352169 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.366048 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.371246 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.372823 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.400566 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.413114 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.440742 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wvh2t"] Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.443889 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" event={"ID":"6e809e90-d3c7-41ea-b346-103b603acc41","Type":"ContainerStarted","Data":"954d9bb56031721f04f901cb748bb91be3905e6f9ab9cc9dc2672869769960cf"} Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.457491 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-22rvv" event={"ID":"73d33fe2-474d-4bb3-8e6f-d80ea22650fd","Type":"ContainerStarted","Data":"603879f830ace87d827eb99d050020e75c130bc744ee1e908dba0f6354ac6093"} Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.461579 4766 generic.go:334] "Generic (PLEG): container finished" podID="07522b89-e97c-4363-a223-e57f11fa7ec5" containerID="febe1cb7d6e4cd1a37ff84694637d5f6607e70667d4a5c6fdda347c39b5d0331" exitCode=0 Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.461688 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" event={"ID":"07522b89-e97c-4363-a223-e57f11fa7ec5","Type":"ContainerDied","Data":"febe1cb7d6e4cd1a37ff84694637d5f6607e70667d4a5c6fdda347c39b5d0331"} Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.461744 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" event={"ID":"07522b89-e97c-4363-a223-e57f11fa7ec5","Type":"ContainerStarted","Data":"e781f42974065c6a1508f7b5eb6eb01378e861baf1375e3ac274520fdcb13ac1"} Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.469597 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" event={"ID":"88e0ee0c-3493-45ce-9dcc-ab7d4958b82a","Type":"ContainerStarted","Data":"c2c91ade9ad49be6ec7a63c6433d6713306accc5f392bdc89aab22bb715388b6"} Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.469964 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" event={"ID":"88e0ee0c-3493-45ce-9dcc-ab7d4958b82a","Type":"ContainerStarted","Data":"6458bd4e353bfcd4b4c6bbe13c379e3de649788003358f43cf2901a384fab54a"} Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.485179 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542461 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppqxm\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-kube-api-access-ppqxm\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542534 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-bound-sa-token\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542566 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bad33be4-1110-4703-b227-8e8b58184dbc-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-42r8w\" (UID: \"bad33be4-1110-4703-b227-8e8b58184dbc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542592 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-tls\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542619 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fc392bf-f07e-4e8c-b95c-76da34bebdb9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-c7x7f\" (UID: \"8fc392bf-f07e-4e8c-b95c-76da34bebdb9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542652 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc392bf-f07e-4e8c-b95c-76da34bebdb9-config\") pod \"kube-controller-manager-operator-78b949d7b-c7x7f\" (UID: \"8fc392bf-f07e-4e8c-b95c-76da34bebdb9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542690 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/46dc25cf-00e9-4525-854e-5acd530ae9f6-etcd-client\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542719 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/46dc25cf-00e9-4525-854e-5acd530ae9f6-etcd-service-ca\") pod \"etcd-operator-b45778765-q77bn\" (UID: 
\"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542748 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a1385abc-0572-4070-bacc-b125c1c42011-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dwfh5\" (UID: \"a1385abc-0572-4070-bacc-b125c1c42011\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542796 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-trusted-ca\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542823 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bbc14cde-7795-4cb5-a9e0-0c2500835f44-ca-trust-extracted\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542867 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bad33be4-1110-4703-b227-8e8b58184dbc-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-42r8w\" (UID: \"bad33be4-1110-4703-b227-8e8b58184dbc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542897 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94qc6\" (UniqueName: \"kubernetes.io/projected/46dc25cf-00e9-4525-854e-5acd530ae9f6-kube-api-access-94qc6\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542930 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.542955 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46dc25cf-00e9-4525-854e-5acd530ae9f6-serving-cert\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: E0102 00:09:34.543702 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-02 00:09:35.043683689 +0000 UTC m=+149.842170843 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.544180 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-chm7b\" (UID: \"ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545169 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bbc14cde-7795-4cb5-a9e0-0c2500835f44-installation-pull-secrets\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545202 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: \"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545221 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5g4w\" (UniqueName: \"kubernetes.io/projected/89735c6e-0dba-4fc9-aa99-e6359999d6c9-kube-api-access-h5g4w\") pod \"dns-operator-744455d44c-cps54\" (UID: \"89735c6e-0dba-4fc9-aa99-e6359999d6c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-cps54" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545240 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/46dc25cf-00e9-4525-854e-5acd530ae9f6-etcd-ca\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545259 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z2jq\" (UniqueName: \"kubernetes.io/projected/bad33be4-1110-4703-b227-8e8b58184dbc-kube-api-access-8z2jq\") pod \"openshift-controller-manager-operator-756b6f6bc6-42r8w\" (UID: \"bad33be4-1110-4703-b227-8e8b58184dbc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545277 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-certificates\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545294 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fc392bf-f07e-4e8c-b95c-76da34bebdb9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-c7x7f\" (UID: \"8fc392bf-f07e-4e8c-b95c-76da34bebdb9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545314 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvwq4\" (UniqueName: \"kubernetes.io/projected/a1385abc-0572-4070-bacc-b125c1c42011-kube-api-access-fvwq4\") pod \"openshift-config-operator-7777fb866f-dwfh5\" (UID: \"a1385abc-0572-4070-bacc-b125c1c42011\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545351 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-config\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: \"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545368 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/89735c6e-0dba-4fc9-aa99-e6359999d6c9-metrics-tls\") pod \"dns-operator-744455d44c-cps54\" (UID: \"89735c6e-0dba-4fc9-aa99-e6359999d6c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-cps54" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545387 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1385abc-0572-4070-bacc-b125c1c42011-serving-cert\") pod \"openshift-config-operator-7777fb866f-dwfh5\" (UID: \"a1385abc-0572-4070-bacc-b125c1c42011\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545416 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt5w5\" (UniqueName: \"kubernetes.io/projected/ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204-kube-api-access-mt5w5\") pod \"cluster-samples-operator-665b6dd947-chm7b\" (UID: \"ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545444 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46dc25cf-00e9-4525-854e-5acd530ae9f6-config\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545463 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-images\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: \"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.545485 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq7qn\" (UniqueName: \"kubernetes.io/projected/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-kube-api-access-bq7qn\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: \"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.647277 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:34 crc kubenswrapper[4766]: E0102 00:09:34.647674 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.147646441 +0000 UTC m=+149.946133595 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.647830 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c7ee93d-7d23-4ab4-9d8d-2909bd385c53-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cf7tp\" (UID: \"8c7ee93d-7d23-4ab4-9d8d-2909bd385c53\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.647909 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/46dc25cf-00e9-4525-854e-5acd530ae9f6-etcd-client\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.647945 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b18850a4-4232-407e-8548-1ec0e4d7236b-apiservice-cert\") pod \"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648041 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/feeca0b4-25d8-417d-a73c-64a386970b13-mcc-auth-proxy-config\") pod 
\"machine-config-controller-84d6567774-h6jd7\" (UID: \"feeca0b4-25d8-417d-a73c-64a386970b13\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648062 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a1385abc-0572-4070-bacc-b125c1c42011-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dwfh5\" (UID: \"a1385abc-0572-4070-bacc-b125c1c42011\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648127 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/46dc25cf-00e9-4525-854e-5acd530ae9f6-etcd-service-ca\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648147 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-trusted-ca\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648192 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b99j\" (UniqueName: \"kubernetes.io/projected/add97db2-55ef-4d2b-89d3-32453f54579a-kube-api-access-4b99j\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648240 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bad33be4-1110-4703-b227-8e8b58184dbc-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-42r8w\" (UID: \"bad33be4-1110-4703-b227-8e8b58184dbc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648318 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0da67e94-91c7-479b-9c5c-a3044ea1a7be-profile-collector-cert\") pod \"catalog-operator-68c6474976-wkgpj\" (UID: \"0da67e94-91c7-479b-9c5c-a3044ea1a7be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648368 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d31454d7-8948-4164-a060-cfe7b8bb15e4-proxy-tls\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648424 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bbc14cde-7795-4cb5-a9e0-0c2500835f44-installation-pull-secrets\") pod 
\"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648444 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: \"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648462 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-chm7b\" (UID: \"ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648496 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh449\" (UniqueName: \"kubernetes.io/projected/49a72ec1-ed97-4bc9-947b-1df688c6676d-kube-api-access-gh449\") pod \"collect-profiles-29455200-4bwhv\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648653 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5g4w\" (UniqueName: \"kubernetes.io/projected/89735c6e-0dba-4fc9-aa99-e6359999d6c9-kube-api-access-h5g4w\") pod \"dns-operator-744455d44c-cps54\" (UID: \"89735c6e-0dba-4fc9-aa99-e6359999d6c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-cps54" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648698 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/49a72ec1-ed97-4bc9-947b-1df688c6676d-config-volume\") pod \"collect-profiles-29455200-4bwhv\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648719 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpln9\" (UniqueName: \"kubernetes.io/projected/a5536f9b-c0e4-4b6c-9b28-65a9766ca00b-kube-api-access-hpln9\") pod \"service-ca-9c57cc56f-s6kgk\" (UID: \"a5536f9b-c0e4-4b6c-9b28-65a9766ca00b\") " pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648738 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbdfh\" (UniqueName: \"kubernetes.io/projected/865bdbd0-cd9b-45d3-95f6-a821ad964b1f-kube-api-access-tbdfh\") pod \"ingress-canary-xhj52\" (UID: \"865bdbd0-cd9b-45d3-95f6-a821ad964b1f\") " pod="openshift-ingress-canary/ingress-canary-xhj52" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648775 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f47lp\" (UniqueName: \"kubernetes.io/projected/022dc757-fcdb-4844-9d9a-ef946049094e-kube-api-access-f47lp\") pod \"machine-config-server-mnk85\" (UID: 
\"022dc757-fcdb-4844-9d9a-ef946049094e\") " pod="openshift-machine-config-operator/machine-config-server-mnk85" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648796 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xr5h\" (UniqueName: \"kubernetes.io/projected/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-kube-api-access-7xr5h\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648827 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvwq4\" (UniqueName: \"kubernetes.io/projected/a1385abc-0572-4070-bacc-b125c1c42011-kube-api-access-fvwq4\") pod \"openshift-config-operator-7777fb866f-dwfh5\" (UID: \"a1385abc-0572-4070-bacc-b125c1c42011\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648888 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg2rd\" (UniqueName: \"kubernetes.io/projected/feeca0b4-25d8-417d-a73c-64a386970b13-kube-api-access-rg2rd\") pod \"machine-config-controller-84d6567774-h6jd7\" (UID: \"feeca0b4-25d8-417d-a73c-64a386970b13\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648907 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrbkc\" (UniqueName: \"kubernetes.io/projected/5901ea26-5679-4e06-b297-4fcbecbdd0ba-kube-api-access-lrbkc\") pod \"control-plane-machine-set-operator-78cbb6b69f-2srpw\" (UID: \"5901ea26-5679-4e06-b297-4fcbecbdd0ba\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648955 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1385abc-0572-4070-bacc-b125c1c42011-serving-cert\") pod \"openshift-config-operator-7777fb866f-dwfh5\" (UID: \"a1385abc-0572-4070-bacc-b125c1c42011\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.648974 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/feeca0b4-25d8-417d-a73c-64a386970b13-proxy-tls\") pod \"machine-config-controller-84d6567774-h6jd7\" (UID: \"feeca0b4-25d8-417d-a73c-64a386970b13\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649026 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-socket-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649049 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-images\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: 
\"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649069 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/16d8672b-0a81-48e3-bbda-d4f2ee461924-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5j85l\" (UID: \"16d8672b-0a81-48e3-bbda-d4f2ee461924\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649103 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c7ee93d-7d23-4ab4-9d8d-2909bd385c53-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cf7tp\" (UID: \"8c7ee93d-7d23-4ab4-9d8d-2909bd385c53\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649151 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz5pf\" (UniqueName: \"kubernetes.io/projected/c27e1f6d-5925-4ca0-89f2-f447afbf845f-kube-api-access-dz5pf\") pod \"marketplace-operator-79b997595-qlzhc\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649171 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d31454d7-8948-4164-a060-cfe7b8bb15e4-images\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649203 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-csi-data-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649223 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8677afa8-a25b-4b5f-9c3c-bc13a667b9f0-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-m8p7l\" (UID: \"8677afa8-a25b-4b5f-9c3c-bc13a667b9f0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649241 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk7lj\" (UniqueName: \"kubernetes.io/projected/38fd421f-b7a6-4b8a-a623-575d28d65212-kube-api-access-zk7lj\") pod \"package-server-manager-789f6589d5-8g66k\" (UID: \"38fd421f-b7a6-4b8a-a623-575d28d65212\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649257 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b18850a4-4232-407e-8548-1ec0e4d7236b-tmpfs\") pod 
\"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649274 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-metrics-tls\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649291 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8bcffb8-8576-44bf-a116-5a19996a48a8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-gsbjs\" (UID: \"f8bcffb8-8576-44bf-a116-5a19996a48a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649310 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d31454d7-8948-4164-a060-cfe7b8bb15e4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649343 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/49a72ec1-ed97-4bc9-947b-1df688c6676d-secret-volume\") pod \"collect-profiles-29455200-4bwhv\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649365 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppqxm\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-kube-api-access-ppqxm\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649400 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-mountpoint-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649420 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-tls\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649605 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fc392bf-f07e-4e8c-b95c-76da34bebdb9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-c7x7f\" (UID: \"8fc392bf-f07e-4e8c-b95c-76da34bebdb9\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649628 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5901ea26-5679-4e06-b297-4fcbecbdd0ba-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2srpw\" (UID: \"5901ea26-5679-4e06-b297-4fcbecbdd0ba\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649647 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qlzhc\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649700 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a1385abc-0572-4070-bacc-b125c1c42011-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dwfh5\" (UID: \"a1385abc-0572-4070-bacc-b125c1c42011\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649724 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-plugins-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649749 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/865bdbd0-cd9b-45d3-95f6-a821ad964b1f-cert\") pod \"ingress-canary-xhj52\" (UID: \"865bdbd0-cd9b-45d3-95f6-a821ad964b1f\") " pod="openshift-ingress-canary/ingress-canary-xhj52" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649771 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5lk2\" (UniqueName: \"kubernetes.io/projected/933f7496-5243-4054-a3cb-e2383d7ac0ac-kube-api-access-t5lk2\") pod \"dns-default-xldzf\" (UID: \"933f7496-5243-4054-a3cb-e2383d7ac0ac\") " pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649803 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ls6v\" (UniqueName: \"kubernetes.io/projected/16d8672b-0a81-48e3-bbda-d4f2ee461924-kube-api-access-6ls6v\") pod \"olm-operator-6b444d44fb-5j85l\" (UID: \"16d8672b-0a81-48e3-bbda-d4f2ee461924\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649835 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/38fd421f-b7a6-4b8a-a623-575d28d65212-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8g66k\" (UID: 
\"38fd421f-b7a6-4b8a-a623-575d28d65212\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649871 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bce0b60a-2a62-4f0f-a4e7-75cef356a84e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l8k7h\" (UID: \"bce0b60a-2a62-4f0f-a4e7-75cef356a84e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649886 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0da67e94-91c7-479b-9c5c-a3044ea1a7be-srv-cert\") pod \"catalog-operator-68c6474976-wkgpj\" (UID: \"0da67e94-91c7-479b-9c5c-a3044ea1a7be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649947 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-trusted-ca\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.649963 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c7ee93d-7d23-4ab4-9d8d-2909bd385c53-config\") pod \"kube-apiserver-operator-766d6c64bb-cf7tp\" (UID: \"8c7ee93d-7d23-4ab4-9d8d-2909bd385c53\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650016 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bbc14cde-7795-4cb5-a9e0-0c2500835f44-ca-trust-extracted\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650038 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkqjz\" (UniqueName: \"kubernetes.io/projected/b18850a4-4232-407e-8548-1ec0e4d7236b-kube-api-access-vkqjz\") pod \"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650072 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94qc6\" (UniqueName: \"kubernetes.io/projected/46dc25cf-00e9-4525-854e-5acd530ae9f6-kube-api-access-94qc6\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650154 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-registration-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " 
pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650179 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650197 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46dc25cf-00e9-4525-854e-5acd530ae9f6-serving-cert\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650214 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41878387-8c5a-4e22-ae75-38dcdc3bc28c-serving-cert\") pod \"service-ca-operator-777779d784-6ldzt\" (UID: \"41878387-8c5a-4e22-ae75-38dcdc3bc28c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650232 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqnjc\" (UniqueName: \"kubernetes.io/projected/f8bcffb8-8576-44bf-a116-5a19996a48a8-kube-api-access-cqnjc\") pod \"multus-admission-controller-857f4d67dd-gsbjs\" (UID: \"f8bcffb8-8576-44bf-a116-5a19996a48a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650250 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bce0b60a-2a62-4f0f-a4e7-75cef356a84e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l8k7h\" (UID: \"bce0b60a-2a62-4f0f-a4e7-75cef356a84e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650267 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/022dc757-fcdb-4844-9d9a-ef946049094e-certs\") pod \"machine-config-server-mnk85\" (UID: \"022dc757-fcdb-4844-9d9a-ef946049094e\") " pod="openshift-machine-config-operator/machine-config-server-mnk85" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650338 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-certificates\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650393 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/46dc25cf-00e9-4525-854e-5acd530ae9f6-etcd-ca\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650431 4766 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z2jq\" (UniqueName: \"kubernetes.io/projected/bad33be4-1110-4703-b227-8e8b58184dbc-kube-api-access-8z2jq\") pod \"openshift-controller-manager-operator-756b6f6bc6-42r8w\" (UID: \"bad33be4-1110-4703-b227-8e8b58184dbc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650488 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fc392bf-f07e-4e8c-b95c-76da34bebdb9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-c7x7f\" (UID: \"8fc392bf-f07e-4e8c-b95c-76da34bebdb9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650516 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/022dc757-fcdb-4844-9d9a-ef946049094e-node-bootstrap-token\") pod \"machine-config-server-mnk85\" (UID: \"022dc757-fcdb-4844-9d9a-ef946049094e\") " pod="openshift-machine-config-operator/machine-config-server-mnk85" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650537 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650594 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8677afa8-a25b-4b5f-9c3c-bc13a667b9f0-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-m8p7l\" (UID: \"8677afa8-a25b-4b5f-9c3c-bc13a667b9f0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650614 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/16d8672b-0a81-48e3-bbda-d4f2ee461924-srv-cert\") pod \"olm-operator-6b444d44fb-5j85l\" (UID: \"16d8672b-0a81-48e3-bbda-d4f2ee461924\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650638 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/933f7496-5243-4054-a3cb-e2383d7ac0ac-metrics-tls\") pod \"dns-default-xldzf\" (UID: \"933f7496-5243-4054-a3cb-e2383d7ac0ac\") " pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650658 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a5536f9b-c0e4-4b6c-9b28-65a9766ca00b-signing-key\") pod \"service-ca-9c57cc56f-s6kgk\" (UID: \"a5536f9b-c0e4-4b6c-9b28-65a9766ca00b\") " pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650678 4766 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-td95v\" (UniqueName: \"kubernetes.io/projected/d31454d7-8948-4164-a060-cfe7b8bb15e4-kube-api-access-td95v\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650721 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-config\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: \"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650739 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/89735c6e-0dba-4fc9-aa99-e6359999d6c9-metrics-tls\") pod \"dns-operator-744455d44c-cps54\" (UID: \"89735c6e-0dba-4fc9-aa99-e6359999d6c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-cps54" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650775 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt5w5\" (UniqueName: \"kubernetes.io/projected/ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204-kube-api-access-mt5w5\") pod \"cluster-samples-operator-665b6dd947-chm7b\" (UID: \"ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650797 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41878387-8c5a-4e22-ae75-38dcdc3bc28c-config\") pod \"service-ca-operator-777779d784-6ldzt\" (UID: \"41878387-8c5a-4e22-ae75-38dcdc3bc28c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650818 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46dc25cf-00e9-4525-854e-5acd530ae9f6-config\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650856 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq7qn\" (UniqueName: \"kubernetes.io/projected/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-kube-api-access-bq7qn\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: \"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650881 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khjkz\" (UniqueName: \"kubernetes.io/projected/8677afa8-a25b-4b5f-9c3c-bc13a667b9f0-kube-api-access-khjkz\") pod \"kube-storage-version-migrator-operator-b67b599dd-m8p7l\" (UID: \"8677afa8-a25b-4b5f-9c3c-bc13a667b9f0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650946 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qlzhc\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.650971 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b18850a4-4232-407e-8548-1ec0e4d7236b-webhook-cert\") pod \"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.651020 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bce0b60a-2a62-4f0f-a4e7-75cef356a84e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l8k7h\" (UID: \"bce0b60a-2a62-4f0f-a4e7-75cef356a84e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.651109 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-bound-sa-token\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.651130 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bad33be4-1110-4703-b227-8e8b58184dbc-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-42r8w\" (UID: \"bad33be4-1110-4703-b227-8e8b58184dbc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.651147 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/933f7496-5243-4054-a3cb-e2383d7ac0ac-config-volume\") pod \"dns-default-xldzf\" (UID: \"933f7496-5243-4054-a3cb-e2383d7ac0ac\") " pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.651194 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4f6n\" (UniqueName: \"kubernetes.io/projected/0da67e94-91c7-479b-9c5c-a3044ea1a7be-kube-api-access-v4f6n\") pod \"catalog-operator-68c6474976-wkgpj\" (UID: \"0da67e94-91c7-479b-9c5c-a3044ea1a7be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.651218 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc392bf-f07e-4e8c-b95c-76da34bebdb9-config\") pod \"kube-controller-manager-operator-78b949d7b-c7x7f\" (UID: \"8fc392bf-f07e-4e8c-b95c-76da34bebdb9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.651236 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-s7gfp\" (UniqueName: \"kubernetes.io/projected/41878387-8c5a-4e22-ae75-38dcdc3bc28c-kube-api-access-s7gfp\") pod \"service-ca-operator-777779d784-6ldzt\" (UID: \"41878387-8c5a-4e22-ae75-38dcdc3bc28c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.651269 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a5536f9b-c0e4-4b6c-9b28-65a9766ca00b-signing-cabundle\") pod \"service-ca-9c57cc56f-s6kgk\" (UID: \"a5536f9b-c0e4-4b6c-9b28-65a9766ca00b\") " pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.651287 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg8bx\" (UniqueName: \"kubernetes.io/projected/10e74978-bccd-4d24-8f14-e81a37704a07-kube-api-access-pg8bx\") pod \"migrator-59844c95c7-b6fbc\" (UID: \"10e74978-bccd-4d24-8f14-e81a37704a07\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.653313 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-config\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: \"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.655102 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-trusted-ca\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.655333 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-images\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: \"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.655787 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bbc14cde-7795-4cb5-a9e0-0c2500835f44-ca-trust-extracted\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: E0102 00:09:34.655927 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.155911448 +0000 UTC m=+149.954398592 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.656009 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46dc25cf-00e9-4525-854e-5acd530ae9f6-config\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.658331 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/46dc25cf-00e9-4525-854e-5acd530ae9f6-etcd-client\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.659443 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/46dc25cf-00e9-4525-854e-5acd530ae9f6-etcd-ca\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.660455 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc392bf-f07e-4e8c-b95c-76da34bebdb9-config\") pod \"kube-controller-manager-operator-78b949d7b-c7x7f\" (UID: \"8fc392bf-f07e-4e8c-b95c-76da34bebdb9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.661688 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bad33be4-1110-4703-b227-8e8b58184dbc-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-42r8w\" (UID: \"bad33be4-1110-4703-b227-8e8b58184dbc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.661892 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-certificates\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.663020 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46dc25cf-00e9-4525-854e-5acd530ae9f6-serving-cert\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.664570 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/46dc25cf-00e9-4525-854e-5acd530ae9f6-etcd-service-ca\") pod 
\"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.666032 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: \"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.667204 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/89735c6e-0dba-4fc9-aa99-e6359999d6c9-metrics-tls\") pod \"dns-operator-744455d44c-cps54\" (UID: \"89735c6e-0dba-4fc9-aa99-e6359999d6c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-cps54" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.667251 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1385abc-0572-4070-bacc-b125c1c42011-serving-cert\") pod \"openshift-config-operator-7777fb866f-dwfh5\" (UID: \"a1385abc-0572-4070-bacc-b125c1c42011\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.670495 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fc392bf-f07e-4e8c-b95c-76da34bebdb9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-c7x7f\" (UID: \"8fc392bf-f07e-4e8c-b95c-76da34bebdb9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.670730 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-chm7b\" (UID: \"ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.675367 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-tls\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.676237 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bbc14cde-7795-4cb5-a9e0-0c2500835f44-installation-pull-secrets\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.679188 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bad33be4-1110-4703-b227-8e8b58184dbc-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-42r8w\" (UID: \"bad33be4-1110-4703-b227-8e8b58184dbc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" Jan 02 00:09:34 
crc kubenswrapper[4766]: I0102 00:09:34.691296 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fc392bf-f07e-4e8c-b95c-76da34bebdb9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-c7x7f\" (UID: \"8fc392bf-f07e-4e8c-b95c-76da34bebdb9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.715502 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq7qn\" (UniqueName: \"kubernetes.io/projected/b2cfe289-2a25-4afa-829a-d44c8cd33fb3-kube-api-access-bq7qn\") pod \"machine-api-operator-5694c8668f-j4kpl\" (UID: \"b2cfe289-2a25-4afa-829a-d44c8cd33fb3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.720808 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.721664 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-8bc2w"] Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.731804 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt5w5\" (UniqueName: \"kubernetes.io/projected/ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204-kube-api-access-mt5w5\") pod \"cluster-samples-operator-665b6dd947-chm7b\" (UID: \"ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.751879 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:34 crc kubenswrapper[4766]: E0102 00:09:34.752052 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.252021133 +0000 UTC m=+150.050508287 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752098 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkqjz\" (UniqueName: \"kubernetes.io/projected/b18850a4-4232-407e-8548-1ec0e4d7236b-kube-api-access-vkqjz\") pod \"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752141 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-registration-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752159 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41878387-8c5a-4e22-ae75-38dcdc3bc28c-serving-cert\") pod \"service-ca-operator-777779d784-6ldzt\" (UID: \"41878387-8c5a-4e22-ae75-38dcdc3bc28c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752182 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752203 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqnjc\" (UniqueName: \"kubernetes.io/projected/f8bcffb8-8576-44bf-a116-5a19996a48a8-kube-api-access-cqnjc\") pod \"multus-admission-controller-857f4d67dd-gsbjs\" (UID: \"f8bcffb8-8576-44bf-a116-5a19996a48a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752223 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bce0b60a-2a62-4f0f-a4e7-75cef356a84e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l8k7h\" (UID: \"bce0b60a-2a62-4f0f-a4e7-75cef356a84e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752242 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/022dc757-fcdb-4844-9d9a-ef946049094e-certs\") pod \"machine-config-server-mnk85\" (UID: \"022dc757-fcdb-4844-9d9a-ef946049094e\") " pod="openshift-machine-config-operator/machine-config-server-mnk85" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752272 4766 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/022dc757-fcdb-4844-9d9a-ef946049094e-node-bootstrap-token\") pod \"machine-config-server-mnk85\" (UID: \"022dc757-fcdb-4844-9d9a-ef946049094e\") " pod="openshift-machine-config-operator/machine-config-server-mnk85" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752292 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/16d8672b-0a81-48e3-bbda-d4f2ee461924-srv-cert\") pod \"olm-operator-6b444d44fb-5j85l\" (UID: \"16d8672b-0a81-48e3-bbda-d4f2ee461924\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752311 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/933f7496-5243-4054-a3cb-e2383d7ac0ac-metrics-tls\") pod \"dns-default-xldzf\" (UID: \"933f7496-5243-4054-a3cb-e2383d7ac0ac\") " pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752331 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752349 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8677afa8-a25b-4b5f-9c3c-bc13a667b9f0-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-m8p7l\" (UID: \"8677afa8-a25b-4b5f-9c3c-bc13a667b9f0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752372 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a5536f9b-c0e4-4b6c-9b28-65a9766ca00b-signing-key\") pod \"service-ca-9c57cc56f-s6kgk\" (UID: \"a5536f9b-c0e4-4b6c-9b28-65a9766ca00b\") " pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752391 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-td95v\" (UniqueName: \"kubernetes.io/projected/d31454d7-8948-4164-a060-cfe7b8bb15e4-kube-api-access-td95v\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752420 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41878387-8c5a-4e22-ae75-38dcdc3bc28c-config\") pod \"service-ca-operator-777779d784-6ldzt\" (UID: \"41878387-8c5a-4e22-ae75-38dcdc3bc28c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752440 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khjkz\" (UniqueName: \"kubernetes.io/projected/8677afa8-a25b-4b5f-9c3c-bc13a667b9f0-kube-api-access-khjkz\") pod \"kube-storage-version-migrator-operator-b67b599dd-m8p7l\" (UID: 
\"8677afa8-a25b-4b5f-9c3c-bc13a667b9f0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752463 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qlzhc\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752483 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b18850a4-4232-407e-8548-1ec0e4d7236b-webhook-cert\") pod \"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752501 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bce0b60a-2a62-4f0f-a4e7-75cef356a84e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l8k7h\" (UID: \"bce0b60a-2a62-4f0f-a4e7-75cef356a84e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752524 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/933f7496-5243-4054-a3cb-e2383d7ac0ac-config-volume\") pod \"dns-default-xldzf\" (UID: \"933f7496-5243-4054-a3cb-e2383d7ac0ac\") " pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752553 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4f6n\" (UniqueName: \"kubernetes.io/projected/0da67e94-91c7-479b-9c5c-a3044ea1a7be-kube-api-access-v4f6n\") pod \"catalog-operator-68c6474976-wkgpj\" (UID: \"0da67e94-91c7-479b-9c5c-a3044ea1a7be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752576 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7gfp\" (UniqueName: \"kubernetes.io/projected/41878387-8c5a-4e22-ae75-38dcdc3bc28c-kube-api-access-s7gfp\") pod \"service-ca-operator-777779d784-6ldzt\" (UID: \"41878387-8c5a-4e22-ae75-38dcdc3bc28c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752596 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a5536f9b-c0e4-4b6c-9b28-65a9766ca00b-signing-cabundle\") pod \"service-ca-9c57cc56f-s6kgk\" (UID: \"a5536f9b-c0e4-4b6c-9b28-65a9766ca00b\") " pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752618 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg8bx\" (UniqueName: \"kubernetes.io/projected/10e74978-bccd-4d24-8f14-e81a37704a07-kube-api-access-pg8bx\") pod \"migrator-59844c95c7-b6fbc\" (UID: \"10e74978-bccd-4d24-8f14-e81a37704a07\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc" Jan 02 
00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752638 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b18850a4-4232-407e-8548-1ec0e4d7236b-apiservice-cert\") pod \"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752658 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c7ee93d-7d23-4ab4-9d8d-2909bd385c53-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cf7tp\" (UID: \"8c7ee93d-7d23-4ab4-9d8d-2909bd385c53\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752680 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/feeca0b4-25d8-417d-a73c-64a386970b13-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-h6jd7\" (UID: \"feeca0b4-25d8-417d-a73c-64a386970b13\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752702 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b99j\" (UniqueName: \"kubernetes.io/projected/add97db2-55ef-4d2b-89d3-32453f54579a-kube-api-access-4b99j\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752722 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-trusted-ca\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752745 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0da67e94-91c7-479b-9c5c-a3044ea1a7be-profile-collector-cert\") pod \"catalog-operator-68c6474976-wkgpj\" (UID: \"0da67e94-91c7-479b-9c5c-a3044ea1a7be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752765 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d31454d7-8948-4164-a060-cfe7b8bb15e4-proxy-tls\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752797 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh449\" (UniqueName: \"kubernetes.io/projected/49a72ec1-ed97-4bc9-947b-1df688c6676d-kube-api-access-gh449\") pod \"collect-profiles-29455200-4bwhv\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752826 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/49a72ec1-ed97-4bc9-947b-1df688c6676d-config-volume\") pod \"collect-profiles-29455200-4bwhv\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752852 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpln9\" (UniqueName: \"kubernetes.io/projected/a5536f9b-c0e4-4b6c-9b28-65a9766ca00b-kube-api-access-hpln9\") pod \"service-ca-9c57cc56f-s6kgk\" (UID: \"a5536f9b-c0e4-4b6c-9b28-65a9766ca00b\") " pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752896 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbdfh\" (UniqueName: \"kubernetes.io/projected/865bdbd0-cd9b-45d3-95f6-a821ad964b1f-kube-api-access-tbdfh\") pod \"ingress-canary-xhj52\" (UID: \"865bdbd0-cd9b-45d3-95f6-a821ad964b1f\") " pod="openshift-ingress-canary/ingress-canary-xhj52" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752919 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f47lp\" (UniqueName: \"kubernetes.io/projected/022dc757-fcdb-4844-9d9a-ef946049094e-kube-api-access-f47lp\") pod \"machine-config-server-mnk85\" (UID: \"022dc757-fcdb-4844-9d9a-ef946049094e\") " pod="openshift-machine-config-operator/machine-config-server-mnk85" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752939 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xr5h\" (UniqueName: \"kubernetes.io/projected/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-kube-api-access-7xr5h\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752978 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg2rd\" (UniqueName: \"kubernetes.io/projected/feeca0b4-25d8-417d-a73c-64a386970b13-kube-api-access-rg2rd\") pod \"machine-config-controller-84d6567774-h6jd7\" (UID: \"feeca0b4-25d8-417d-a73c-64a386970b13\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.752999 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrbkc\" (UniqueName: \"kubernetes.io/projected/5901ea26-5679-4e06-b297-4fcbecbdd0ba-kube-api-access-lrbkc\") pod \"control-plane-machine-set-operator-78cbb6b69f-2srpw\" (UID: \"5901ea26-5679-4e06-b297-4fcbecbdd0ba\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753018 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/feeca0b4-25d8-417d-a73c-64a386970b13-proxy-tls\") pod \"machine-config-controller-84d6567774-h6jd7\" (UID: \"feeca0b4-25d8-417d-a73c-64a386970b13\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753042 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-socket-dir\") pod 
\"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753067 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/16d8672b-0a81-48e3-bbda-d4f2ee461924-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5j85l\" (UID: \"16d8672b-0a81-48e3-bbda-d4f2ee461924\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753087 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c7ee93d-7d23-4ab4-9d8d-2909bd385c53-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cf7tp\" (UID: \"8c7ee93d-7d23-4ab4-9d8d-2909bd385c53\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753107 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-csi-data-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753139 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz5pf\" (UniqueName: \"kubernetes.io/projected/c27e1f6d-5925-4ca0-89f2-f447afbf845f-kube-api-access-dz5pf\") pod \"marketplace-operator-79b997595-qlzhc\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753158 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d31454d7-8948-4164-a060-cfe7b8bb15e4-images\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753176 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b18850a4-4232-407e-8548-1ec0e4d7236b-tmpfs\") pod \"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753198 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-metrics-tls\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753220 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8677afa8-a25b-4b5f-9c3c-bc13a667b9f0-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-m8p7l\" (UID: \"8677afa8-a25b-4b5f-9c3c-bc13a667b9f0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 
00:09:34.753244 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk7lj\" (UniqueName: \"kubernetes.io/projected/38fd421f-b7a6-4b8a-a623-575d28d65212-kube-api-access-zk7lj\") pod \"package-server-manager-789f6589d5-8g66k\" (UID: \"38fd421f-b7a6-4b8a-a623-575d28d65212\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753265 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/49a72ec1-ed97-4bc9-947b-1df688c6676d-secret-volume\") pod \"collect-profiles-29455200-4bwhv\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753288 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8bcffb8-8576-44bf-a116-5a19996a48a8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-gsbjs\" (UID: \"f8bcffb8-8576-44bf-a116-5a19996a48a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753314 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d31454d7-8948-4164-a060-cfe7b8bb15e4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753357 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5901ea26-5679-4e06-b297-4fcbecbdd0ba-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2srpw\" (UID: \"5901ea26-5679-4e06-b297-4fcbecbdd0ba\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753382 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-mountpoint-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753404 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qlzhc\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753427 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-plugins-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753450 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-6ls6v\" (UniqueName: \"kubernetes.io/projected/16d8672b-0a81-48e3-bbda-d4f2ee461924-kube-api-access-6ls6v\") pod \"olm-operator-6b444d44fb-5j85l\" (UID: \"16d8672b-0a81-48e3-bbda-d4f2ee461924\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753468 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/865bdbd0-cd9b-45d3-95f6-a821ad964b1f-cert\") pod \"ingress-canary-xhj52\" (UID: \"865bdbd0-cd9b-45d3-95f6-a821ad964b1f\") " pod="openshift-ingress-canary/ingress-canary-xhj52" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753489 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5lk2\" (UniqueName: \"kubernetes.io/projected/933f7496-5243-4054-a3cb-e2383d7ac0ac-kube-api-access-t5lk2\") pod \"dns-default-xldzf\" (UID: \"933f7496-5243-4054-a3cb-e2383d7ac0ac\") " pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753510 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/38fd421f-b7a6-4b8a-a623-575d28d65212-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8g66k\" (UID: \"38fd421f-b7a6-4b8a-a623-575d28d65212\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753531 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bce0b60a-2a62-4f0f-a4e7-75cef356a84e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l8k7h\" (UID: \"bce0b60a-2a62-4f0f-a4e7-75cef356a84e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753552 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0da67e94-91c7-479b-9c5c-a3044ea1a7be-srv-cert\") pod \"catalog-operator-68c6474976-wkgpj\" (UID: \"0da67e94-91c7-479b-9c5c-a3044ea1a7be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.753580 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c7ee93d-7d23-4ab4-9d8d-2909bd385c53-config\") pod \"kube-apiserver-operator-766d6c64bb-cf7tp\" (UID: \"8c7ee93d-7d23-4ab4-9d8d-2909bd385c53\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.754731 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41878387-8c5a-4e22-ae75-38dcdc3bc28c-config\") pod \"service-ca-operator-777779d784-6ldzt\" (UID: \"41878387-8c5a-4e22-ae75-38dcdc3bc28c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.754928 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/feeca0b4-25d8-417d-a73c-64a386970b13-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-h6jd7\" (UID: 
\"feeca0b4-25d8-417d-a73c-64a386970b13\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.754966 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-registration-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.756364 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-trusted-ca\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.757246 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/49a72ec1-ed97-4bc9-947b-1df688c6676d-config-volume\") pod \"collect-profiles-29455200-4bwhv\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.757665 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-csi-data-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.757780 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d31454d7-8948-4164-a060-cfe7b8bb15e4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.757793 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-mountpoint-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.758768 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b18850a4-4232-407e-8548-1ec0e4d7236b-tmpfs\") pod \"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.760124 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qlzhc\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.761321 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-plugins-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.761592 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b18850a4-4232-407e-8548-1ec0e4d7236b-apiservice-cert\") pod \"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.762390 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/add97db2-55ef-4d2b-89d3-32453f54579a-socket-dir\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:34 crc kubenswrapper[4766]: E0102 00:09:34.762971 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.260860555 +0000 UTC m=+150.059347899 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.763249 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d31454d7-8948-4164-a060-cfe7b8bb15e4-proxy-tls\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.763686 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41878387-8c5a-4e22-ae75-38dcdc3bc28c-serving-cert\") pod \"service-ca-operator-777779d784-6ldzt\" (UID: \"41878387-8c5a-4e22-ae75-38dcdc3bc28c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.764009 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c7ee93d-7d23-4ab4-9d8d-2909bd385c53-config\") pod \"kube-apiserver-operator-766d6c64bb-cf7tp\" (UID: \"8c7ee93d-7d23-4ab4-9d8d-2909bd385c53\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.764627 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a5536f9b-c0e4-4b6c-9b28-65a9766ca00b-signing-cabundle\") pod \"service-ca-9c57cc56f-s6kgk\" (UID: \"a5536f9b-c0e4-4b6c-9b28-65a9766ca00b\") " pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.765836 4766 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/49a72ec1-ed97-4bc9-947b-1df688c6676d-secret-volume\") pod \"collect-profiles-29455200-4bwhv\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.766042 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/16d8672b-0a81-48e3-bbda-d4f2ee461924-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5j85l\" (UID: \"16d8672b-0a81-48e3-bbda-d4f2ee461924\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.766384 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94qc6\" (UniqueName: \"kubernetes.io/projected/46dc25cf-00e9-4525-854e-5acd530ae9f6-kube-api-access-94qc6\") pod \"etcd-operator-b45778765-q77bn\" (UID: \"46dc25cf-00e9-4525-854e-5acd530ae9f6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.766521 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/933f7496-5243-4054-a3cb-e2383d7ac0ac-config-volume\") pod \"dns-default-xldzf\" (UID: \"933f7496-5243-4054-a3cb-e2383d7ac0ac\") " pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.767748 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d31454d7-8948-4164-a060-cfe7b8bb15e4-images\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.767766 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8677afa8-a25b-4b5f-9c3c-bc13a667b9f0-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-m8p7l\" (UID: \"8677afa8-a25b-4b5f-9c3c-bc13a667b9f0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.768701 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/933f7496-5243-4054-a3cb-e2383d7ac0ac-metrics-tls\") pod \"dns-default-xldzf\" (UID: \"933f7496-5243-4054-a3cb-e2383d7ac0ac\") " pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.769031 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5901ea26-5679-4e06-b297-4fcbecbdd0ba-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2srpw\" (UID: \"5901ea26-5679-4e06-b297-4fcbecbdd0ba\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.769125 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8677afa8-a25b-4b5f-9c3c-bc13a667b9f0-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-m8p7l\" (UID: 
\"8677afa8-a25b-4b5f-9c3c-bc13a667b9f0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.769349 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c7ee93d-7d23-4ab4-9d8d-2909bd385c53-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cf7tp\" (UID: \"8c7ee93d-7d23-4ab4-9d8d-2909bd385c53\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.769827 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0da67e94-91c7-479b-9c5c-a3044ea1a7be-profile-collector-cert\") pod \"catalog-operator-68c6474976-wkgpj\" (UID: \"0da67e94-91c7-479b-9c5c-a3044ea1a7be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.770412 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8z2jq\" (UniqueName: \"kubernetes.io/projected/bad33be4-1110-4703-b227-8e8b58184dbc-kube-api-access-8z2jq\") pod \"openshift-controller-manager-operator-756b6f6bc6-42r8w\" (UID: \"bad33be4-1110-4703-b227-8e8b58184dbc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.770883 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/022dc757-fcdb-4844-9d9a-ef946049094e-node-bootstrap-token\") pod \"machine-config-server-mnk85\" (UID: \"022dc757-fcdb-4844-9d9a-ef946049094e\") " pod="openshift-machine-config-operator/machine-config-server-mnk85" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.772302 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b18850a4-4232-407e-8548-1ec0e4d7236b-webhook-cert\") pod \"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.772351 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8bcffb8-8576-44bf-a116-5a19996a48a8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-gsbjs\" (UID: \"f8bcffb8-8576-44bf-a116-5a19996a48a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.772695 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qlzhc\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.773196 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a5536f9b-c0e4-4b6c-9b28-65a9766ca00b-signing-key\") pod \"service-ca-9c57cc56f-s6kgk\" (UID: \"a5536f9b-c0e4-4b6c-9b28-65a9766ca00b\") " pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" Jan 02 00:09:34 crc 
kubenswrapper[4766]: I0102 00:09:34.773836 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bce0b60a-2a62-4f0f-a4e7-75cef356a84e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l8k7h\" (UID: \"bce0b60a-2a62-4f0f-a4e7-75cef356a84e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.778676 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bce0b60a-2a62-4f0f-a4e7-75cef356a84e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l8k7h\" (UID: \"bce0b60a-2a62-4f0f-a4e7-75cef356a84e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.779038 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-metrics-tls\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.779212 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/865bdbd0-cd9b-45d3-95f6-a821ad964b1f-cert\") pod \"ingress-canary-xhj52\" (UID: \"865bdbd0-cd9b-45d3-95f6-a821ad964b1f\") " pod="openshift-ingress-canary/ingress-canary-xhj52" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.779740 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/feeca0b4-25d8-417d-a73c-64a386970b13-proxy-tls\") pod \"machine-config-controller-84d6567774-h6jd7\" (UID: \"feeca0b4-25d8-417d-a73c-64a386970b13\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.780807 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0da67e94-91c7-479b-9c5c-a3044ea1a7be-srv-cert\") pod \"catalog-operator-68c6474976-wkgpj\" (UID: \"0da67e94-91c7-479b-9c5c-a3044ea1a7be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.783229 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/16d8672b-0a81-48e3-bbda-d4f2ee461924-srv-cert\") pod \"olm-operator-6b444d44fb-5j85l\" (UID: \"16d8672b-0a81-48e3-bbda-d4f2ee461924\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.783829 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/38fd421f-b7a6-4b8a-a623-575d28d65212-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8g66k\" (UID: \"38fd421f-b7a6-4b8a-a623-575d28d65212\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.793349 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvwq4\" (UniqueName: 
\"kubernetes.io/projected/a1385abc-0572-4070-bacc-b125c1c42011-kube-api-access-fvwq4\") pod \"openshift-config-operator-7777fb866f-dwfh5\" (UID: \"a1385abc-0572-4070-bacc-b125c1c42011\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.795035 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/022dc757-fcdb-4844-9d9a-ef946049094e-certs\") pod \"machine-config-server-mnk85\" (UID: \"022dc757-fcdb-4844-9d9a-ef946049094e\") " pod="openshift-machine-config-operator/machine-config-server-mnk85" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.795302 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.811511 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.831546 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppqxm\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-kube-api-access-ppqxm\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.843353 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-pthnj"] Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.843447 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-czkht"] Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.850870 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-bound-sa-token\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.852534 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7"] Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.852619 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cnd4q"] Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.857076 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:34 crc kubenswrapper[4766]: E0102 00:09:34.857616 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.357575495 +0000 UTC m=+150.156062659 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.858907 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:34 crc kubenswrapper[4766]: E0102 00:09:34.860362 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.360343534 +0000 UTC m=+150.158830688 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.869905 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5g4w\" (UniqueName: \"kubernetes.io/projected/89735c6e-0dba-4fc9-aa99-e6359999d6c9-kube-api-access-h5g4w\") pod \"dns-operator-744455d44c-cps54\" (UID: \"89735c6e-0dba-4fc9-aa99-e6359999d6c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-cps54" Jan 02 00:09:34 crc kubenswrapper[4766]: W0102 00:09:34.888463 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda59b5466_b9dd_45f1_be0b_18b7afa27942.slice/crio-0262d45d2f9c480aee3b1bccdc362df873ffaa9d53f27995d5524d3da47620de WatchSource:0}: Error finding container 0262d45d2f9c480aee3b1bccdc362df873ffaa9d53f27995d5524d3da47620de: Status 404 returned error can't find the container with id 0262d45d2f9c480aee3b1bccdc362df873ffaa9d53f27995d5524d3da47620de Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.895388 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.916652 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s5ggw"] Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.926069 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f47lp\" (UniqueName: \"kubernetes.io/projected/022dc757-fcdb-4844-9d9a-ef946049094e-kube-api-access-f47lp\") pod \"machine-config-server-mnk85\" (UID: \"022dc757-fcdb-4844-9d9a-ef946049094e\") " pod="openshift-machine-config-operator/machine-config-server-mnk85" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.929179 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-482gx"] Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.934107 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c7ee93d-7d23-4ab4-9d8d-2909bd385c53-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cf7tp\" (UID: \"8c7ee93d-7d23-4ab4-9d8d-2909bd385c53\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.944053 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg8bx\" (UniqueName: \"kubernetes.io/projected/10e74978-bccd-4d24-8f14-e81a37704a07-kube-api-access-pg8bx\") pod \"migrator-59844c95c7-b6fbc\" (UID: \"10e74978-bccd-4d24-8f14-e81a37704a07\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.944360 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29455200-8qg7p"] Jan 02 00:09:34 crc kubenswrapper[4766]: W0102 00:09:34.953973 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f019efc_00d2_4d74_bebf_70782e52182b.slice/crio-ac49f393a7e0dd72b1ef26453587431d1559eb7071c1ccbba25e42edb6867d66 WatchSource:0}: Error finding container ac49f393a7e0dd72b1ef26453587431d1559eb7071c1ccbba25e42edb6867d66: Status 404 returned error can't find the container with id ac49f393a7e0dd72b1ef26453587431d1559eb7071c1ccbba25e42edb6867d66 Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.961664 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:34 crc kubenswrapper[4766]: E0102 00:09:34.962115 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.462095811 +0000 UTC m=+150.260582965 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.965835 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-td95v\" (UniqueName: \"kubernetes.io/projected/d31454d7-8948-4164-a060-cfe7b8bb15e4-kube-api-access-td95v\") pod \"machine-config-operator-74547568cd-b64cx\" (UID: \"d31454d7-8948-4164-a060-cfe7b8bb15e4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.974748 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkqjz\" (UniqueName: \"kubernetes.io/projected/b18850a4-4232-407e-8548-1ec0e4d7236b-kube-api-access-vkqjz\") pod \"packageserver-d55dfcdfc-7t82v\" (UID: \"b18850a4-4232-407e-8548-1ec0e4d7236b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.982018 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" Jan 02 00:09:34 crc kubenswrapper[4766]: I0102 00:09:34.993177 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.010227 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.014364 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xr5h\" (UniqueName: \"kubernetes.io/projected/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-kube-api-access-7xr5h\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.021817 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khjkz\" (UniqueName: \"kubernetes.io/projected/8677afa8-a25b-4b5f-9c3c-bc13a667b9f0-kube-api-access-khjkz\") pod \"kube-storage-version-migrator-operator-b67b599dd-m8p7l\" (UID: \"8677afa8-a25b-4b5f-9c3c-bc13a667b9f0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.025833 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.031174 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b99j\" (UniqueName: \"kubernetes.io/projected/add97db2-55ef-4d2b-89d3-32453f54579a-kube-api-access-4b99j\") pod \"csi-hostpathplugin-wjtkd\" (UID: \"add97db2-55ef-4d2b-89d3-32453f54579a\") " pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.050597 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.063533 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:35 crc kubenswrapper[4766]: E0102 00:09:35.064139 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.564123094 +0000 UTC m=+150.362610248 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.071643 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrbkc\" (UniqueName: \"kubernetes.io/projected/5901ea26-5679-4e06-b297-4fcbecbdd0ba-kube-api-access-lrbkc\") pod \"control-plane-machine-set-operator-78cbb6b69f-2srpw\" (UID: \"5901ea26-5679-4e06-b297-4fcbecbdd0ba\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.085624 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg2rd\" (UniqueName: \"kubernetes.io/projected/feeca0b4-25d8-417d-a73c-64a386970b13-kube-api-access-rg2rd\") pod \"machine-config-controller-84d6567774-h6jd7\" (UID: \"feeca0b4-25d8-417d-a73c-64a386970b13\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.093577 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh449\" (UniqueName: \"kubernetes.io/projected/49a72ec1-ed97-4bc9-947b-1df688c6676d-kube-api-access-gh449\") pod \"collect-profiles-29455200-4bwhv\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.103545 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.111169 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpln9\" (UniqueName: \"kubernetes.io/projected/a5536f9b-c0e4-4b6c-9b28-65a9766ca00b-kube-api-access-hpln9\") pod \"service-ca-9c57cc56f-s6kgk\" (UID: \"a5536f9b-c0e4-4b6c-9b28-65a9766ca00b\") " pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.113954 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.131905 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ls6v\" (UniqueName: \"kubernetes.io/projected/16d8672b-0a81-48e3-bbda-d4f2ee461924-kube-api-access-6ls6v\") pod \"olm-operator-6b444d44fb-5j85l\" (UID: \"16d8672b-0a81-48e3-bbda-d4f2ee461924\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.144991 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-mnk85" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.150032 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbdfh\" (UniqueName: \"kubernetes.io/projected/865bdbd0-cd9b-45d3-95f6-a821ad964b1f-kube-api-access-tbdfh\") pod \"ingress-canary-xhj52\" (UID: \"865bdbd0-cd9b-45d3-95f6-a821ad964b1f\") " pod="openshift-ingress-canary/ingress-canary-xhj52" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.163608 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-j4kpl"] Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.167420 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.167611 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz5pf\" (UniqueName: \"kubernetes.io/projected/c27e1f6d-5925-4ca0-89f2-f447afbf845f-kube-api-access-dz5pf\") pod \"marketplace-operator-79b997595-qlzhc\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.167668 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cps54" Jan 02 00:09:35 crc kubenswrapper[4766]: E0102 00:09:35.167739 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.667722277 +0000 UTC m=+150.466209431 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.180619 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.211438 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk7lj\" (UniqueName: \"kubernetes.io/projected/38fd421f-b7a6-4b8a-a623-575d28d65212-kube-api-access-zk7lj\") pod \"package-server-manager-789f6589d5-8g66k\" (UID: \"38fd421f-b7a6-4b8a-a623-575d28d65212\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" Jan 02 00:09:35 crc kubenswrapper[4766]: W0102 00:09:35.223585 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2cfe289_2a25_4afa_829a_d44c8cd33fb3.slice/crio-4414f3c79da24c92310fb94a2025d359009c57ee5115be03451adb4e8ae128f8 WatchSource:0}: Error finding container 4414f3c79da24c92310fb94a2025d359009c57ee5115be03451adb4e8ae128f8: Status 404 returned error can't find the container with id 4414f3c79da24c92310fb94a2025d359009c57ee5115be03451adb4e8ae128f8 Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.225271 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5"] Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.235316 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5lk2\" (UniqueName: \"kubernetes.io/projected/933f7496-5243-4054-a3cb-e2383d7ac0ac-kube-api-access-t5lk2\") pod \"dns-default-xldzf\" (UID: \"933f7496-5243-4054-a3cb-e2383d7ac0ac\") " pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.238999 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7gfp\" (UniqueName: \"kubernetes.io/projected/41878387-8c5a-4e22-ae75-38dcdc3bc28c-kube-api-access-s7gfp\") pod \"service-ca-operator-777779d784-6ldzt\" (UID: \"41878387-8c5a-4e22-ae75-38dcdc3bc28c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.262463 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4f6n\" (UniqueName: \"kubernetes.io/projected/0da67e94-91c7-479b-9c5c-a3044ea1a7be-kube-api-access-v4f6n\") pod \"catalog-operator-68c6474976-wkgpj\" (UID: \"0da67e94-91c7-479b-9c5c-a3044ea1a7be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.270760 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:35 crc kubenswrapper[4766]: 
E0102 00:09:35.272008 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.771989026 +0000 UTC m=+150.570476180 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.284114 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b"] Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.285404 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqnjc\" (UniqueName: \"kubernetes.io/projected/f8bcffb8-8576-44bf-a116-5a19996a48a8-kube-api-access-cqnjc\") pod \"multus-admission-controller-857f4d67dd-gsbjs\" (UID: \"f8bcffb8-8576-44bf-a116-5a19996a48a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.296336 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bce0b60a-2a62-4f0f-a4e7-75cef356a84e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l8k7h\" (UID: \"bce0b60a-2a62-4f0f-a4e7-75cef356a84e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.310101 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.318559 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.329769 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/71a9ab84-dad3-47c3-ad6f-d0670e8eca29-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8c5sd\" (UID: \"71a9ab84-dad3-47c3-ad6f-d0670e8eca29\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.337664 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.341085 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q77bn"] Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.343169 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.373506 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:35 crc kubenswrapper[4766]: E0102 00:09:35.373852 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.873831505 +0000 UTC m=+150.672318659 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.373953 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.409334 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.409386 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.409744 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.409861 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.423360 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.431613 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.453105 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-xhj52" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.457323 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.474629 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:35 crc kubenswrapper[4766]: E0102 00:09:35.475024 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:35.975011817 +0000 UTC m=+150.773498961 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.507832 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" event={"ID":"a1385abc-0572-4070-bacc-b125c1c42011","Type":"ContainerStarted","Data":"785f553c39c3aa58fb6defbcb63165544cc278d62b2ed4bcc14323407b40ea7f"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.513300 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" event={"ID":"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361","Type":"ContainerStarted","Data":"a8f04fc400d04481fdf50aaed2e13edd169cae76a4e90430a555928e3ef42b3d"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.552792 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.553155 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.553172 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w"] Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.553190 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" event={"ID":"afef674f-42ec-4083-ba3c-803546393a46","Type":"ContainerStarted","Data":"8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.553364 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" event={"ID":"afef674f-42ec-4083-ba3c-803546393a46","Type":"ContainerStarted","Data":"6d8be454daa89fc65a0fd507c748e28773778222c1556b656c41b29869fa9aa6"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.553392 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s5ggw" 
event={"ID":"1f019efc-00d2-4d74-bebf-70782e52182b","Type":"ContainerStarted","Data":"ac49f393a7e0dd72b1ef26453587431d1559eb7071c1ccbba25e42edb6867d66"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.553405 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-482gx" event={"ID":"bb03fd80-3959-4cea-9739-3580d6310a64","Type":"ContainerStarted","Data":"f2eb9322912430f8cad3c93b1d50d15d360079bf90e54d38e68e9c7aecc75ee4"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.553418 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29455200-8qg7p" event={"ID":"89725c43-8074-47e4-9464-52dac202626c","Type":"ContainerStarted","Data":"011c5667faf0bdaf367702a20580cc03f1c7b330e2db809e2bbd53304a7449e7"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.553430 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-22rvv" event={"ID":"73d33fe2-474d-4bb3-8e6f-d80ea22650fd","Type":"ContainerStarted","Data":"d51b2c62aafc5072a7398ec6051e67c6cb52c98b58a172ca6f2401e73e77002f"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.557356 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" event={"ID":"35c9a208-651e-4453-975f-2d069276a9aa","Type":"ContainerStarted","Data":"05e362ea7804f619280f15d1664181659dfb6c5277a1755afccb59d9648c2b46"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.558739 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" event={"ID":"3d3116b8-d190-455f-b0a8-f44f7818b034","Type":"ContainerStarted","Data":"9f5a47d94ed4166e600f25459b94e8a1a2ea2ee3b14cb1432cae6f3dd7f879b0"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.558766 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" event={"ID":"3d3116b8-d190-455f-b0a8-f44f7818b034","Type":"ContainerStarted","Data":"b97923079cc5a316d9cfd65b03b53b7616ded72468ab33c54a103ce225ceabae"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.561911 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" event={"ID":"a59b5466-b9dd-45f1-be0b-18b7afa27942","Type":"ContainerStarted","Data":"0262d45d2f9c480aee3b1bccdc362df873ffaa9d53f27995d5524d3da47620de"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.563044 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-8bc2w" event={"ID":"509add4d-67be-4dff-8194-55516bc7bbd7","Type":"ContainerStarted","Data":"38279eb4b2225dcb36b08764af616ad3fca710f62ce7617b29149133edc91a34"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.563070 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-8bc2w" event={"ID":"509add4d-67be-4dff-8194-55516bc7bbd7","Type":"ContainerStarted","Data":"33f74f7829c43cb9d2c105c215403ee404b23ea5151f9ef0c590bea02bdbef8f"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.564166 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-8bc2w" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.565653 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" 
event={"ID":"100eb09d-e755-4cd9-b31f-e2b6920d91f0","Type":"ContainerStarted","Data":"3bef48330d26bd8edba3ae7aeded34471c4a393fb586428535a5726c24d9bf57"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.565688 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" event={"ID":"100eb09d-e755-4cd9-b31f-e2b6920d91f0","Type":"ContainerStarted","Data":"65274d5709fb28f666a040a1f930ea8b9b4d42e1861a6ce9a74143674abefe35"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.575195 4766 patch_prober.go:28] interesting pod/downloads-7954f5f757-8bc2w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.575253 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8bc2w" podUID="509add4d-67be-4dff-8194-55516bc7bbd7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.575675 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:35 crc kubenswrapper[4766]: E0102 00:09:35.575888 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:36.07586807 +0000 UTC m=+150.874355224 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.576057 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.576355 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" event={"ID":"6e809e90-d3c7-41ea-b346-103b603acc41","Type":"ContainerStarted","Data":"3f41c165227cd855efa1065a308ca1b866632947a33ae3572462164c9933cf84"} Jan 02 00:09:35 crc kubenswrapper[4766]: E0102 00:09:35.576560 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-02 00:09:36.076545048 +0000 UTC m=+150.875032202 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.576596 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.597433 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.601736 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" event={"ID":"07522b89-e97c-4363-a223-e57f11fa7ec5","Type":"ContainerStarted","Data":"9ebae63f2a8b0a2a11d8e7c41e552278bb39b5f2f820532e8ff4855b5d989109"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.602027 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f"] Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.651733 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" event={"ID":"b2cfe289-2a25-4afa-829a-d44c8cd33fb3","Type":"ContainerStarted","Data":"4414f3c79da24c92310fb94a2025d359009c57ee5115be03451adb4e8ae128f8"} Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.678808 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:35 crc kubenswrapper[4766]: E0102 00:09:35.695688 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:36.19566276 +0000 UTC m=+150.994149914 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.755588 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.798849 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:35 crc kubenswrapper[4766]: E0102 00:09:35.800371 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:36.30035656 +0000 UTC m=+151.098843714 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.902224 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:35 crc kubenswrapper[4766]: E0102 00:09:35.902659 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:36.40264141 +0000 UTC m=+151.201128564 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:35 crc kubenswrapper[4766]: I0102 00:09:35.970691 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx"] Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.003615 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:36 crc kubenswrapper[4766]: E0102 00:09:36.004043 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:36.504030357 +0000 UTC m=+151.302517511 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.119685 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:36 crc kubenswrapper[4766]: E0102 00:09:36.120247 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:36.620227116 +0000 UTC m=+151.418714270 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.235288 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:36 crc kubenswrapper[4766]: E0102 00:09:36.235687 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:36.735674606 +0000 UTC m=+151.534161760 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.335680 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.336167 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:36 crc kubenswrapper[4766]: E0102 00:09:36.336580 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:36.836563001 +0000 UTC m=+151.635050155 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.406766 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:36 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:36 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:36 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.406818 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.488145 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:36 crc kubenswrapper[4766]: E0102 00:09:36.489589 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:36.989570796 +0000 UTC m=+151.788058020 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.589811 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:36 crc kubenswrapper[4766]: E0102 00:09:36.590094 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.09007345 +0000 UTC m=+151.888560604 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.597429 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c7s4x" podStartSLOduration=131.597405905 podStartE2EDuration="2m11.597405905s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:36.550377373 +0000 UTC m=+151.348864537" watchObservedRunningTime="2026-01-02 00:09:36.597405905 +0000 UTC m=+151.395893059" Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.608724 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cps54"] Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.666123 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nxmh7" podStartSLOduration=131.66609545 podStartE2EDuration="2m11.66609545s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:36.658515709 +0000 UTC m=+151.457002863" watchObservedRunningTime="2026-01-02 00:09:36.66609545 +0000 UTC m=+151.464582604" Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.670531 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s5ggw" event={"ID":"1f019efc-00d2-4d74-bebf-70782e52182b","Type":"ContainerStarted","Data":"1da33135aa1680388d944002524248e4322879fe925b12e6aaf3488d5d4df6d1"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.673817 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.681829 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v"] Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.681912 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp"] Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.681994 4766 patch_prober.go:28] interesting pod/console-operator-58897d9998-s5ggw container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.682043 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-s5ggw" podUID="1f019efc-00d2-4d74-bebf-70782e52182b" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Jan 02 00:09:36 crc 
kubenswrapper[4766]: I0102 00:09:36.683086 4766 generic.go:334] "Generic (PLEG): container finished" podID="f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361" containerID="36ee08b15a38eb35acc40cac019fc0f66a5ea583b4c56d8458dbdc3fa87f45be" exitCode=0 Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.683137 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" event={"ID":"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361","Type":"ContainerDied","Data":"36ee08b15a38eb35acc40cac019fc0f66a5ea583b4c56d8458dbdc3fa87f45be"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.692908 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:36 crc kubenswrapper[4766]: E0102 00:09:36.693211 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.193201231 +0000 UTC m=+151.991688385 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.700944 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" podStartSLOduration=131.700930136 podStartE2EDuration="2m11.700930136s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:36.69992903 +0000 UTC m=+151.498416184" watchObservedRunningTime="2026-01-02 00:09:36.700930136 +0000 UTC m=+151.499417290" Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.710378 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" event={"ID":"a59b5466-b9dd-45f1-be0b-18b7afa27942","Type":"ContainerStarted","Data":"615d38b6db0906847ce872edce6e064a9e814a695f741438c5765453e39eddb1"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.716985 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29455200-8qg7p" event={"ID":"89725c43-8074-47e4-9464-52dac202626c","Type":"ContainerStarted","Data":"229cefade20d52b000c50b31c78c287d3cae7fab036bcda7ad3df2f8697c7b36"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.717950 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" event={"ID":"8fc392bf-f07e-4e8c-b95c-76da34bebdb9","Type":"ContainerStarted","Data":"f7437e7d5bd742ccf34be59b74fc4831ec55249ca94900fbe2eaca3708f5c87f"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.719126 4766 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-482gx" event={"ID":"bb03fd80-3959-4cea-9739-3580d6310a64","Type":"ContainerStarted","Data":"cf663be504e254b303461c6e3498c05618c95f85900241cab9b979642e6615be"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.728268 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" event={"ID":"bad33be4-1110-4703-b227-8e8b58184dbc","Type":"ContainerStarted","Data":"31a3782aa69157855bd116251fa3ca404b465bf9e3fa165aa03a488431224f47"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.730757 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-mnk85" event={"ID":"022dc757-fcdb-4844-9d9a-ef946049094e","Type":"ContainerStarted","Data":"7e20da7076614ed3a7c57693c68762179b232daf7194bd715e50f43c40ebe95d"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.736193 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" event={"ID":"3d3116b8-d190-455f-b0a8-f44f7818b034","Type":"ContainerStarted","Data":"f81750279d8c42f105ab500a8aadda65b97219224f3f0c54f3cd1182abca2fb8"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.742103 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" event={"ID":"ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204","Type":"ContainerStarted","Data":"338076a5c4aa5cbb21f063ee6b6a6687f36d6b9af02f20ccfdd3b4e5f3b675ef"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.745004 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" event={"ID":"35c9a208-651e-4453-975f-2d069276a9aa","Type":"ContainerStarted","Data":"09beee16b1a97bc8b94cc4cf1b0cba1ee0667d6050711a617c1ab5244753cdb7"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.748306 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.758267 4766 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-czkht container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.14:6443/healthz\": dial tcp 10.217.0.14:6443: connect: connection refused" start-of-body= Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.758329 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" podUID="35c9a208-651e-4453-975f-2d069276a9aa" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.14:6443/healthz\": dial tcp 10.217.0.14:6443: connect: connection refused" Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.760034 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" event={"ID":"d31454d7-8948-4164-a060-cfe7b8bb15e4","Type":"ContainerStarted","Data":"3ba799835993e86ad9b711967a3ae7e03ea4ec215a9ed11ed32d205f9df138b8"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.761515 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" 
event={"ID":"46dc25cf-00e9-4525-854e-5acd530ae9f6","Type":"ContainerStarted","Data":"2646984d737faf6944f78d90a428352f90bd8c4eaef38c06de269a51dc2f41a8"} Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.763714 4766 patch_prober.go:28] interesting pod/downloads-7954f5f757-8bc2w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.763747 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8bc2w" podUID="509add4d-67be-4dff-8194-55516bc7bbd7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.793912 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:36 crc kubenswrapper[4766]: E0102 00:09:36.797796 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.297737538 +0000 UTC m=+152.096224692 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.801815 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:36 crc kubenswrapper[4766]: E0102 00:09:36.803596 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.303572564 +0000 UTC m=+152.102059718 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.845674 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" podStartSLOduration=131.845655061 podStartE2EDuration="2m11.845655061s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:36.845284042 +0000 UTC m=+151.643771196" watchObservedRunningTime="2026-01-02 00:09:36.845655061 +0000 UTC m=+151.644142215" Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.909208 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:36 crc kubenswrapper[4766]: E0102 00:09:36.920114 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.420087781 +0000 UTC m=+152.218574975 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:36 crc kubenswrapper[4766]: I0102 00:09:36.958954 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-8bc2w" podStartSLOduration=131.958931077 podStartE2EDuration="2m11.958931077s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:36.958513037 +0000 UTC m=+151.757000191" watchObservedRunningTime="2026-01-02 00:09:36.958931077 +0000 UTC m=+151.757418231" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.005621 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" podStartSLOduration=131.005599149 podStartE2EDuration="2m11.005599149s" podCreationTimestamp="2026-01-02 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:37.004563634 +0000 UTC m=+151.803050798" watchObservedRunningTime="2026-01-02 00:09:37.005599149 +0000 UTC m=+151.804086303" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.012425 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.012821 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.51280445 +0000 UTC m=+152.311291604 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.116557 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.116738 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.616710721 +0000 UTC m=+152.415197875 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.117318 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.117641 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.617624434 +0000 UTC m=+152.416111588 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.120830 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-22rvv" podStartSLOduration=132.120812064 podStartE2EDuration="2m12.120812064s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:37.12028601 +0000 UTC m=+151.918773164" watchObservedRunningTime="2026-01-02 00:09:37.120812064 +0000 UTC m=+151.919299218" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.220659 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.221415 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.72138876 +0000 UTC m=+152.519875914 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.313885 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cj6b2" podStartSLOduration=132.313848183 podStartE2EDuration="2m12.313848183s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:37.312437998 +0000 UTC m=+152.110925172" watchObservedRunningTime="2026-01-02 00:09:37.313848183 +0000 UTC m=+152.112335337" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.332557 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.333427 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.833399904 +0000 UTC m=+152.631887058 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.347783 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-482gx" podStartSLOduration=132.347744176 podStartE2EDuration="2m12.347744176s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:37.34555735 +0000 UTC m=+152.144044504" watchObservedRunningTime="2026-01-02 00:09:37.347744176 +0000 UTC m=+152.146231330" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.372346 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-s5ggw" podStartSLOduration=132.372298412 podStartE2EDuration="2m12.372298412s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:37.369975104 +0000 UTC m=+152.168462258" watchObservedRunningTime="2026-01-02 00:09:37.372298412 +0000 UTC m=+152.170785736" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.416286 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" podStartSLOduration=132.416258417 podStartE2EDuration="2m12.416258417s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:37.41357392 +0000 UTC m=+152.212061074" watchObservedRunningTime="2026-01-02 00:09:37.416258417 +0000 UTC m=+152.214745571" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.434380 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.434590 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.934563697 +0000 UTC m=+152.733050851 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.434764 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.436239 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:37.936222548 +0000 UTC m=+152.734709702 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.536413 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.537374 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.037353049 +0000 UTC m=+152.835840203 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.551403 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-pruner-29455200-8qg7p" podStartSLOduration=132.551388191 podStartE2EDuration="2m12.551388191s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:37.547658747 +0000 UTC m=+152.346145901" watchObservedRunningTime="2026-01-02 00:09:37.551388191 +0000 UTC m=+152.349875345" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.637431 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-pthnj" podStartSLOduration=132.637412123 podStartE2EDuration="2m12.637412123s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:37.63456667 +0000 UTC m=+152.433053824" watchObservedRunningTime="2026-01-02 00:09:37.637412123 +0000 UTC m=+152.435899277" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.641618 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.641944 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.141933756 +0000 UTC m=+152.940420900 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.739530 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:37 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:37 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:37 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.739982 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.745838 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.746355 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.246336889 +0000 UTC m=+153.044824043 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.791282 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" event={"ID":"8c7ee93d-7d23-4ab4-9d8d-2909bd385c53","Type":"ContainerStarted","Data":"10b3f94128328a0f9686a1b3c2d93ba53f0ef5b58f4c28406423e965e5ec3a6e"} Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.795369 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" event={"ID":"b18850a4-4232-407e-8548-1ec0e4d7236b","Type":"ContainerStarted","Data":"59e0e0581515b1296eef5970ec8c096eea5cadd0c2ba7581933eacdef2f6604d"} Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.802591 4766 generic.go:334] "Generic (PLEG): container finished" podID="a1385abc-0572-4070-bacc-b125c1c42011" containerID="f067df036e69beaaa38e9450ea2042bcdadf245c75a3809dd5b372f552007982" exitCode=0 Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.802683 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" event={"ID":"a1385abc-0572-4070-bacc-b125c1c42011","Type":"ContainerDied","Data":"f067df036e69beaaa38e9450ea2042bcdadf245c75a3809dd5b372f552007982"} Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.816946 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" event={"ID":"ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204","Type":"ContainerStarted","Data":"c97ceeaf56a4f045111a4da40abc0f207157152893b1bea43eba7ec36aa539fc"} Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.824095 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" event={"ID":"bad33be4-1110-4703-b227-8e8b58184dbc","Type":"ContainerStarted","Data":"a62a4ae29094bb5ac3759c37ae5c8a27797b972201fe3509d4465e33dc4c01d4"} Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.831193 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" event={"ID":"b2cfe289-2a25-4afa-829a-d44c8cd33fb3","Type":"ContainerStarted","Data":"1e1dc9dfc2184dd6fa171bb9815cb42bb3f61b4a23d517a2d30cde06c59b5a2f"} Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.833496 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-mnk85" event={"ID":"022dc757-fcdb-4844-9d9a-ef946049094e","Type":"ContainerStarted","Data":"64ed00380d96307751e009f34557e289b91f0fe969984da3af9f72f5279272b3"} Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.835051 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cps54" event={"ID":"89735c6e-0dba-4fc9-aa99-e6359999d6c9","Type":"ContainerStarted","Data":"325855b4be26f0355b1cf6dd5aaadece18e50d492354eec37e73e25ed0994f33"} Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.837951 4766 patch_prober.go:28] 
interesting pod/downloads-7954f5f757-8bc2w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.838006 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8bc2w" podUID="509add4d-67be-4dff-8194-55516bc7bbd7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.840635 4766 patch_prober.go:28] interesting pod/console-operator-58897d9998-s5ggw container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.840741 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-s5ggw" podUID="1f019efc-00d2-4d74-bebf-70782e52182b" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.847312 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.850100 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.350078485 +0000 UTC m=+153.148565709 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.928501 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-mnk85" podStartSLOduration=6.928470644 podStartE2EDuration="6.928470644s" podCreationTimestamp="2026-01-02 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:37.892175972 +0000 UTC m=+152.690663136" watchObservedRunningTime="2026-01-02 00:09:37.928470644 +0000 UTC m=+152.726957798" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.929259 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42r8w" podStartSLOduration=132.929252784 podStartE2EDuration="2m12.929252784s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:37.917132579 +0000 UTC m=+152.715619763" watchObservedRunningTime="2026-01-02 00:09:37.929252784 +0000 UTC m=+152.727739938" Jan 02 00:09:37 crc kubenswrapper[4766]: I0102 00:09:37.952007 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:37 crc kubenswrapper[4766]: E0102 00:09:37.954473 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.454379906 +0000 UTC m=+153.252867070 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.058690 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:38 crc kubenswrapper[4766]: E0102 00:09:38.061068 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.561053565 +0000 UTC m=+153.359540719 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.161956 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:38 crc kubenswrapper[4766]: E0102 00:09:38.163007 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.662973986 +0000 UTC m=+153.461461140 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.163039 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:38 crc kubenswrapper[4766]: E0102 00:09:38.164094 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.664082284 +0000 UTC m=+153.462569448 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.264058 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:38 crc kubenswrapper[4766]: E0102 00:09:38.264226 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.764195409 +0000 UTC m=+153.562682573 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.264707 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:38 crc kubenswrapper[4766]: E0102 00:09:38.265004 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.764993199 +0000 UTC m=+153.563480353 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.369559 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:38 crc kubenswrapper[4766]: E0102 00:09:38.369989 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.869967717 +0000 UTC m=+153.668454871 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.373621 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:38 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:38 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:38 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.373667 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.441232 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.451065 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.477678 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:38 crc kubenswrapper[4766]: E0102 00:09:38.479587 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:38.9795737 +0000 UTC m=+153.778060854 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.483099 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.583529 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:38 crc kubenswrapper[4766]: E0102 00:09:38.584079 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:39.084063035 +0000 UTC m=+153.882550189 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.597762 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.691224 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:38 crc kubenswrapper[4766]: E0102 00:09:38.691717 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:39.19170166 +0000 UTC m=+153.990188814 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.721219 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qlzhc"] Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.794191 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:38 crc kubenswrapper[4766]: E0102 00:09:38.794452 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:39.29443687 +0000 UTC m=+154.092924024 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.880626 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc"] Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.890023 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" event={"ID":"8c7ee93d-7d23-4ab4-9d8d-2909bd385c53","Type":"ContainerStarted","Data":"b85fdec7be1fab95f55c788fd3abe9767a352d35ef60be790cc83b6c77adb97a"} Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.898252 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:38 crc kubenswrapper[4766]: E0102 00:09:38.898593 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:39.398581416 +0000 UTC m=+154.197068570 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.936463 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" event={"ID":"b18850a4-4232-407e-8548-1ec0e4d7236b","Type":"ContainerStarted","Data":"9448ac16e8867d64eb3a252b62a3eba007e54eb753b648c72a30e237443b093c"} Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.936756 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.983942 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7tp" podStartSLOduration=133.98391849 podStartE2EDuration="2m13.98391849s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:38.936335905 +0000 UTC m=+153.734823069" watchObservedRunningTime="2026-01-02 00:09:38.98391849 +0000 UTC m=+153.782405644" Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.986368 4766 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-7t82v container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" start-of-body= Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.986415 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" podUID="b18850a4-4232-407e-8548-1ec0e4d7236b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.986594 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" event={"ID":"c27e1f6d-5925-4ca0-89f2-f447afbf845f","Type":"ContainerStarted","Data":"dac2740a9d23a0d280670afc5c9de0f651f8c0e9d02a8c3e2e036bf59914c247"} Jan 02 00:09:38 crc kubenswrapper[4766]: I0102 00:09:38.995068 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" podStartSLOduration=132.99504325 podStartE2EDuration="2m12.99504325s" podCreationTimestamp="2026-01-02 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:38.986922856 +0000 UTC m=+153.785410020" watchObservedRunningTime="2026-01-02 00:09:38.99504325 +0000 UTC m=+153.793530394" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.005540 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:39 crc kubenswrapper[4766]: E0102 00:09:39.006537 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:39.506520838 +0000 UTC m=+154.305007992 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.029372 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" event={"ID":"ad7ca0d9-9d5c-4f2a-bc37-4c75415b7204","Type":"ContainerStarted","Data":"1ec1c44130fb36825ddb716fb17d13affab8b9b2eb3a367c737f985cffa6ac69"} Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.108999 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:39 crc kubenswrapper[4766]: E0102 00:09:39.110196 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:39.610180322 +0000 UTC m=+154.408667476 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.113048 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" event={"ID":"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361","Type":"ContainerStarted","Data":"25bc6b336c6f33c2449d43a6116d4712044b53f9a6f5687093537c913909417e"} Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.113099 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" event={"ID":"f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361","Type":"ContainerStarted","Data":"7bcf94fabce6ca89b426d535b63366d5d4893fd814a37b85944604ac5b720876"} Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.137844 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chm7b" podStartSLOduration=134.137825627 podStartE2EDuration="2m14.137825627s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:39.077049451 +0000 UTC m=+153.875536625" watchObservedRunningTime="2026-01-02 00:09:39.137825627 +0000 UTC m=+153.936312781" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.140320 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" event={"ID":"8fc392bf-f07e-4e8c-b95c-76da34bebdb9","Type":"ContainerStarted","Data":"8d516b9a840d341c0178362da8a410392abf9a69d02a3adf0d7bf820d8265da6"} Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.144497 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw"] Jan 02 00:09:39 crc kubenswrapper[4766]: W0102 00:09:39.175150 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5901ea26_5679_4e06_b297_4fcbecbdd0ba.slice/crio-6e50568ebcc864de57b9479d25d1ed020e7bdfee545acda21d27daaf5c0216d4 WatchSource:0}: Error finding container 6e50568ebcc864de57b9479d25d1ed020e7bdfee545acda21d27daaf5c0216d4: Status 404 returned error can't find the container with id 6e50568ebcc864de57b9479d25d1ed020e7bdfee545acda21d27daaf5c0216d4 Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.207405 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" event={"ID":"d31454d7-8948-4164-a060-cfe7b8bb15e4","Type":"ContainerStarted","Data":"543df70756f0d92a41d7cfb4ba400740cf6858a2b64d80b05ed050e6bcf57096"} Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.207462 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" event={"ID":"d31454d7-8948-4164-a060-cfe7b8bb15e4","Type":"ContainerStarted","Data":"40a10f6586b0e947420570b4604c85d7df9f5d95f8dac4db77a7a1117e35ce4a"} Jan 02 00:09:39 crc 
kubenswrapper[4766]: I0102 00:09:39.208427 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" podStartSLOduration=134.208416381 podStartE2EDuration="2m14.208416381s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:39.206723888 +0000 UTC m=+154.005211042" watchObservedRunningTime="2026-01-02 00:09:39.208416381 +0000 UTC m=+154.006903535" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.210459 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:39 crc kubenswrapper[4766]: E0102 00:09:39.211295 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:39.711280173 +0000 UTC m=+154.509767327 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.240957 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" event={"ID":"46dc25cf-00e9-4525-854e-5acd530ae9f6","Type":"ContainerStarted","Data":"4c43931ed1c13fe1118b9bc9ebd91d886a43378837b797379720874f937b34f7"} Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.285940 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wjtkd"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.296072 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.314661 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-xhj52"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.316612 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-gsbjs"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.324442 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:39 crc kubenswrapper[4766]: E0102 00:09:39.326266 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-02 00:09:39.826253281 +0000 UTC m=+154.624740435 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.335390 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cps54" event={"ID":"89735c6e-0dba-4fc9-aa99-e6359999d6c9","Type":"ContainerStarted","Data":"81ceb0dc45adebfa1b731a577dc12266d14fc68bd20a8773f3e1f280510d86da"} Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.347688 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.347752 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:39 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:39 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:39 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.347810 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.369699 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.370035 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.380160 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c7x7f" podStartSLOduration=134.380133295 podStartE2EDuration="2m14.380133295s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:39.335923554 +0000 UTC m=+154.134410708" watchObservedRunningTime="2026-01-02 00:09:39.380133295 +0000 UTC m=+154.178620449" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.381413 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-s6kgk"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.385828 4766 patch_prober.go:28] interesting pod/apiserver-76f77b778f-cnd4q container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.385891 4766 prober.go:107] 
"Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" podUID="f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.386961 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" event={"ID":"b2cfe289-2a25-4afa-829a-d44c8cd33fb3","Type":"ContainerStarted","Data":"deea94592b6aedc0a4e66c4a3a53dbe581941c3d0f95678bf69385b1faee59f7"} Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.393491 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.396988 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.411441 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b64cx" podStartSLOduration=134.411415331 podStartE2EDuration="2m14.411415331s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:39.394501795 +0000 UTC m=+154.192988959" watchObservedRunningTime="2026-01-02 00:09:39.411415331 +0000 UTC m=+154.209902485" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.414316 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xldzf"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.426396 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:39 crc kubenswrapper[4766]: E0102 00:09:39.428074 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:39.928048368 +0000 UTC m=+154.726535562 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:39 crc kubenswrapper[4766]: W0102 00:09:39.428392 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod865bdbd0_cd9b_45d3_95f6_a821ad964b1f.slice/crio-a1a8cf45911bd17fc6508134d7fea5ac4f47b5dba9717d9f656dad1bc1daeeb0 WatchSource:0}: Error finding container a1a8cf45911bd17fc6508134d7fea5ac4f47b5dba9717d9f656dad1bc1daeeb0: Status 404 returned error can't find the container with id a1a8cf45911bd17fc6508134d7fea5ac4f47b5dba9717d9f656dad1bc1daeeb0 Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.428982 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" event={"ID":"a1385abc-0572-4070-bacc-b125c1c42011","Type":"ContainerStarted","Data":"838d13f68e889cb895f2029783a1949f198e084a3be3871b69a135eaf5275e0f"} Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.430390 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.435903 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.452286 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-cps54" podStartSLOduration=134.452260207 podStartE2EDuration="2m14.452260207s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:39.428793797 +0000 UTC m=+154.227280951" watchObservedRunningTime="2026-01-02 00:09:39.452260207 +0000 UTC m=+154.250747361" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.455545 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.466318 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cv2dl" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.467419 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.476541 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7"] Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.477428 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-q77bn" podStartSLOduration=134.477411769 podStartE2EDuration="2m14.477411769s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 
00:09:39.46672916 +0000 UTC m=+154.265216334" watchObservedRunningTime="2026-01-02 00:09:39.477411769 +0000 UTC m=+154.275898923" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.528982 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:39 crc kubenswrapper[4766]: E0102 00:09:39.531474 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:40.031461276 +0000 UTC m=+154.829948430 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.547155 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" podStartSLOduration=134.54713721 podStartE2EDuration="2m14.54713721s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:39.546670338 +0000 UTC m=+154.345157492" watchObservedRunningTime="2026-01-02 00:09:39.54713721 +0000 UTC m=+154.345624364" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.632376 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-j4kpl" podStartSLOduration=134.632353791 podStartE2EDuration="2m14.632353791s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:39.629356036 +0000 UTC m=+154.427843200" watchObservedRunningTime="2026-01-02 00:09:39.632353791 +0000 UTC m=+154.430840955" Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.676130 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:39 crc kubenswrapper[4766]: E0102 00:09:39.676890 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:40.176836969 +0000 UTC m=+154.975324123 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.787130 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:39 crc kubenswrapper[4766]: E0102 00:09:39.787946 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:40.287931819 +0000 UTC m=+155.086418973 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.888885 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:39 crc kubenswrapper[4766]: E0102 00:09:39.889001 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:40.388984539 +0000 UTC m=+155.187471693 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.889168 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:39 crc kubenswrapper[4766]: E0102 00:09:39.889433 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:40.38942707 +0000 UTC m=+155.187914224 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:39 crc kubenswrapper[4766]: I0102 00:09:39.993601 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:39 crc kubenswrapper[4766]: E0102 00:09:39.994418 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:40.494397377 +0000 UTC m=+155.292884531 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.096248 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:40 crc kubenswrapper[4766]: E0102 00:09:40.096654 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:40.596639216 +0000 UTC m=+155.395126370 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.197911 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:40 crc kubenswrapper[4766]: E0102 00:09:40.198458 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:40.698437953 +0000 UTC m=+155.496925107 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.300099 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:40 crc kubenswrapper[4766]: E0102 00:09:40.300502 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:40.800487757 +0000 UTC m=+155.598974911 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.338388 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:40 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:40 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:40 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.338446 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.401347 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:40 crc kubenswrapper[4766]: E0102 00:09:40.401523 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:40.901496205 +0000 UTC m=+155.699983349 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.401737 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:40 crc kubenswrapper[4766]: E0102 00:09:40.402115 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:40.902098429 +0000 UTC m=+155.700585583 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.440333 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" event={"ID":"71a9ab84-dad3-47c3-ad6f-d0670e8eca29","Type":"ContainerStarted","Data":"c746689f17c42a8687a826755f7c678be781d8e296191dba8e36ca299a076336"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.458220 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" event={"ID":"38fd421f-b7a6-4b8a-a623-575d28d65212","Type":"ContainerStarted","Data":"25fc3b5c564f1615511d580a8e1b783b514c57ee769c23f553d75658541b63d9"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.466698 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" event={"ID":"c27e1f6d-5925-4ca0-89f2-f447afbf845f","Type":"ContainerStarted","Data":"d2330ceeb23365890361dd88cc88eda1b72fe700da0c86ecbb54523ef42aef76"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.466760 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.469008 4766 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qlzhc container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.469032 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" 
event={"ID":"49a72ec1-ed97-4bc9-947b-1df688c6676d","Type":"ContainerStarted","Data":"cbecda82a0c69c935897745d00b8815899227c5bfe1029091ac8a39494dd4436"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.469071 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" podUID="c27e1f6d-5925-4ca0-89f2-f447afbf845f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.470398 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" event={"ID":"16d8672b-0a81-48e3-bbda-d4f2ee461924","Type":"ContainerStarted","Data":"c9106de1c440074115c44f6b14603939803a5cd16a1a37176ffc7240e8186d67"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.482166 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cps54" event={"ID":"89735c6e-0dba-4fc9-aa99-e6359999d6c9","Type":"ContainerStarted","Data":"c4e3ac03e7de46b6fa2d93159250f78d5e517e0d6f9f87de2cfab7d14454d326"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.485035 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" event={"ID":"add97db2-55ef-4d2b-89d3-32453f54579a","Type":"ContainerStarted","Data":"345821705fe0b15e0ad2f654991630ef3ec20229554ab2f0b921c2ec683360af"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.490149 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" event={"ID":"feeca0b4-25d8-417d-a73c-64a386970b13","Type":"ContainerStarted","Data":"5ac380efc92385f68eec119aa3653a0d73fadefa79f8416969013df6678f1649"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.491025 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-xhj52" event={"ID":"865bdbd0-cd9b-45d3-95f6-a821ad964b1f","Type":"ContainerStarted","Data":"a1a8cf45911bd17fc6508134d7fea5ac4f47b5dba9717d9f656dad1bc1daeeb0"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.491168 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" podStartSLOduration=135.491116446 podStartE2EDuration="2m15.491116446s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:40.489181287 +0000 UTC m=+155.287668451" watchObservedRunningTime="2026-01-02 00:09:40.491116446 +0000 UTC m=+155.289603600" Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.491838 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" event={"ID":"bce0b60a-2a62-4f0f-a4e7-75cef356a84e","Type":"ContainerStarted","Data":"af74af1283a403e6288ebcf0ecd00f7141edc5efb63214e667d4c2a729520821"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.494566 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw" event={"ID":"5901ea26-5679-4e06-b297-4fcbecbdd0ba","Type":"ContainerStarted","Data":"60c5b41b1d13c7a361e906298a77cadaf29c96ee5c533ef1d488353d5fd41e45"} Jan 02 00:09:40 crc 
kubenswrapper[4766]: I0102 00:09:40.494610 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw" event={"ID":"5901ea26-5679-4e06-b297-4fcbecbdd0ba","Type":"ContainerStarted","Data":"6e50568ebcc864de57b9479d25d1ed020e7bdfee545acda21d27daaf5c0216d4"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.497056 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xldzf" event={"ID":"933f7496-5243-4054-a3cb-e2383d7ac0ac","Type":"ContainerStarted","Data":"9fe61b00ea08730df1936d37273be6e9d902ec19f9d9217bca4cc45500a98a52"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.498771 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" event={"ID":"a5536f9b-c0e4-4b6c-9b28-65a9766ca00b","Type":"ContainerStarted","Data":"3a511ca9377e9e0238eb61040055c9d9a073aee0bcd83156167d2ec1029b2f22"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.502242 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:40 crc kubenswrapper[4766]: E0102 00:09:40.502741 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.002722537 +0000 UTC m=+155.801209691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.503150 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" event={"ID":"f8bcffb8-8576-44bf-a116-5a19996a48a8","Type":"ContainerStarted","Data":"536c9c6854094f03a2823a93eeb839084f511912100a9bd94fc9a5ac3d284c6a"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.507331 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc" event={"ID":"10e74978-bccd-4d24-8f14-e81a37704a07","Type":"ContainerStarted","Data":"f8c7f1b85f54237b0dada69fd1dee3500555bfc7d91c9a189bd89d614ff875ff"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.507371 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc" event={"ID":"10e74978-bccd-4d24-8f14-e81a37704a07","Type":"ContainerStarted","Data":"20f1ba9c2509607895145271872052b4475621f8b93f8fc4f72ba2dfad4d7fc9"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.508321 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" 
event={"ID":"0da67e94-91c7-479b-9c5c-a3044ea1a7be","Type":"ContainerStarted","Data":"a436159c21cc504080cb92621feba73a4916372614cd0c4812df8a3fa925bab0"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.509305 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" event={"ID":"8677afa8-a25b-4b5f-9c3c-bc13a667b9f0","Type":"ContainerStarted","Data":"daf832bd9f60b682bf16346c901a00272584030f948212472a43d697ee6398de"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.510500 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" event={"ID":"41878387-8c5a-4e22-ae75-38dcdc3bc28c","Type":"ContainerStarted","Data":"b974010496bef15ebdfbe9a021b5f7a4b37202346f12ed814032b462c2c2629e"} Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.511911 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.606212 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:40 crc kubenswrapper[4766]: E0102 00:09:40.610225 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.110209208 +0000 UTC m=+155.908696362 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.707766 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:40 crc kubenswrapper[4766]: E0102 00:09:40.708127 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.208111058 +0000 UTC m=+156.006598212 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.808880 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:40 crc kubenswrapper[4766]: E0102 00:09:40.809180 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.309168656 +0000 UTC m=+156.107655810 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.909635 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:40 crc kubenswrapper[4766]: E0102 00:09:40.909932 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.409902917 +0000 UTC m=+156.208390071 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:40 crc kubenswrapper[4766]: I0102 00:09:40.910313 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:40 crc kubenswrapper[4766]: E0102 00:09:40.910643 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.410628575 +0000 UTC m=+156.209115729 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.018864 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.019218 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.519199703 +0000 UTC m=+156.317686847 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.121008 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.121321 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.621307358 +0000 UTC m=+156.419794502 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.222381 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.222545 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.722522241 +0000 UTC m=+156.521009395 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.222741 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.223086 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.723075054 +0000 UTC m=+156.521562208 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.329431 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.329780 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.829763376 +0000 UTC m=+156.628250530 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.339692 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:41 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:41 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:41 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.339750 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.427602 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7t82v" Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.434610 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.434991 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:41.934979169 +0000 UTC m=+156.733466323 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.560756 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.561101 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:42.061070366 +0000 UTC m=+156.859557530 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.561329 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.561752 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:42.061732263 +0000 UTC m=+156.860219417 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.636510 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" event={"ID":"16d8672b-0a81-48e3-bbda-d4f2ee461924","Type":"ContainerStarted","Data":"83b9f8f1f452111135e4e7c1e637340465876924db8c567281bae679179a1954"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.637204 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.664836 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.665000 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:42.164961906 +0000 UTC m=+156.963449060 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.665388 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.665821 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:42.165807668 +0000 UTC m=+156.964294822 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.744340 4766 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-5j85l container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body= Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.744461 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" podUID="16d8672b-0a81-48e3-bbda-d4f2ee461924" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.745974 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-xhj52" event={"ID":"865bdbd0-cd9b-45d3-95f6-a821ad964b1f","Type":"ContainerStarted","Data":"585f653df27d6d682203273aaf7b5f41f2b186ae8803e2deb04c34cd15d2c8ce"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.747927 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" event={"ID":"feeca0b4-25d8-417d-a73c-64a386970b13","Type":"ContainerStarted","Data":"d62acc93dd8d7b8f305ca0b5413814f2c71ce912bb8eb431758cbbf8130f3dda"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.748013 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" event={"ID":"feeca0b4-25d8-417d-a73c-64a386970b13","Type":"ContainerStarted","Data":"2b3cd08339f95c5a68db22ffe3142534063795a86e9cd209468d39251220e9d6"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 
00:09:41.786546 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.787913 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:42.287896555 +0000 UTC m=+157.086383699 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.816967 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" event={"ID":"38fd421f-b7a6-4b8a-a623-575d28d65212","Type":"ContainerStarted","Data":"871e2f5d33d2bf4ec9c8e7214b7f50a08d1d367f1b02ccfd4b163c65d9e52701"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.819216 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" podStartSLOduration=136.819193651 podStartE2EDuration="2m16.819193651s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:41.816005761 +0000 UTC m=+156.614492915" watchObservedRunningTime="2026-01-02 00:09:41.819193651 +0000 UTC m=+156.617680805" Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.820406 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xldzf" event={"ID":"933f7496-5243-4054-a3cb-e2383d7ac0ac","Type":"ContainerStarted","Data":"ca213b2d177c71a9606965a49c79b5d8556404545e35690a50dad52b760ea6fb"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.821217 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.840207 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc" event={"ID":"10e74978-bccd-4d24-8f14-e81a37704a07","Type":"ContainerStarted","Data":"e5fe583e4eafacb71111b651f71525bd609bc472b777b015ba0158a5ea3a19ed"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.842309 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" event={"ID":"0da67e94-91c7-479b-9c5c-a3044ea1a7be","Type":"ContainerStarted","Data":"8efc63e1b22a3aacb8c538cddad0d7ac6aa9d95da59ace2243d351b4472380e1"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.842961 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:41 crc 
kubenswrapper[4766]: I0102 00:09:41.846916 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-xhj52" podStartSLOduration=9.846903617 podStartE2EDuration="9.846903617s" podCreationTimestamp="2026-01-02 00:09:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:41.845062491 +0000 UTC m=+156.643549655" watchObservedRunningTime="2026-01-02 00:09:41.846903617 +0000 UTC m=+156.645390771" Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.861455 4766 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-wkgpj container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body= Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.861593 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" podUID="0da67e94-91c7-479b-9c5c-a3044ea1a7be" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.862991 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" event={"ID":"49a72ec1-ed97-4bc9-947b-1df688c6676d","Type":"ContainerStarted","Data":"00d9e6f95bbe9f24ced66da0d4c53dc5bddf495d4d59f6a05eba8ba0dfa768a0"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.867453 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" event={"ID":"41878387-8c5a-4e22-ae75-38dcdc3bc28c","Type":"ContainerStarted","Data":"4d8e3e4f2738c01e31c5185e5566dfe452d15efd54874b416a20217e68cf1284"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.874463 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" event={"ID":"bce0b60a-2a62-4f0f-a4e7-75cef356a84e","Type":"ContainerStarted","Data":"29e1ffe07b91156d45d686cbf40b0e40dd77252b9b06b0cf4f79d33ad57add50"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.888504 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" event={"ID":"71a9ab84-dad3-47c3-ad6f-d0670e8eca29","Type":"ContainerStarted","Data":"0c1593a530af982c15c28bd867dc22359fff5d90f88ce2005481f7fa6349f2a6"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.888565 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" event={"ID":"71a9ab84-dad3-47c3-ad6f-d0670e8eca29","Type":"ContainerStarted","Data":"05a51af5b1023c4c774cbedd150649a06ff70c36a6bc02b08b282419d3f8b062"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.889117 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 
00:09:41.889465 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:42.389452146 +0000 UTC m=+157.187939300 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.910524 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" event={"ID":"f8bcffb8-8576-44bf-a116-5a19996a48a8","Type":"ContainerStarted","Data":"98e4954d46a44551e94bdd4308d86a13552f8363b93d79ce8cbceda383eb0ec6"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.923363 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" event={"ID":"8677afa8-a25b-4b5f-9c3c-bc13a667b9f0","Type":"ContainerStarted","Data":"f52792cbc523b81d1828547492a2cb95049625f4b39748f7cd5054b6a77f598c"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.937187 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" event={"ID":"a5536f9b-c0e4-4b6c-9b28-65a9766ca00b","Type":"ContainerStarted","Data":"c6b5785e75ffffe872770a44a1fa32e78185d5fd23646aed6bf58d186e866f07"} Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.937782 4766 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qlzhc container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.937833 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" podUID="c27e1f6d-5925-4ca0-89f2-f447afbf845f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.962329 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h6jd7" podStartSLOduration=136.962305756 podStartE2EDuration="2m16.962305756s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:41.900339049 +0000 UTC m=+156.698826203" watchObservedRunningTime="2026-01-02 00:09:41.962305756 +0000 UTC m=+156.760792910" Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.988509 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-xldzf" podStartSLOduration=9.988489994 podStartE2EDuration="9.988489994s" podCreationTimestamp="2026-01-02 00:09:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:41.96364526 +0000 UTC m=+156.762132424" watchObservedRunningTime="2026-01-02 00:09:41.988489994 +0000 UTC m=+156.786977148" Jan 02 00:09:41 crc kubenswrapper[4766]: I0102 00:09:41.990365 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:41 crc kubenswrapper[4766]: E0102 00:09:41.992310 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:42.49229258 +0000 UTC m=+157.290779734 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.003218 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dwfh5" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.017920 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b6fbc" podStartSLOduration=137.017897003 podStartE2EDuration="2m17.017897003s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:41.988931585 +0000 UTC m=+156.787418739" watchObservedRunningTime="2026-01-02 00:09:42.017897003 +0000 UTC m=+156.816384157" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.046392 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8c5sd" podStartSLOduration=137.046374968 podStartE2EDuration="2m17.046374968s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:42.020424697 +0000 UTC m=+156.818911861" watchObservedRunningTime="2026-01-02 00:09:42.046374968 +0000 UTC m=+156.844862122" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.047601 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" podStartSLOduration=137.047592799 podStartE2EDuration="2m17.047592799s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:42.045077126 +0000 UTC m=+156.843564280" watchObservedRunningTime="2026-01-02 00:09:42.047592799 +0000 UTC m=+156.846079953" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.110092 4766 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" podStartSLOduration=136.110072659 podStartE2EDuration="2m16.110072659s" podCreationTimestamp="2026-01-02 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:42.107269088 +0000 UTC m=+156.905756262" watchObservedRunningTime="2026-01-02 00:09:42.110072659 +0000 UTC m=+156.908559813" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.119407 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l8k7h" podStartSLOduration=137.119395113 podStartE2EDuration="2m17.119395113s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:42.067835538 +0000 UTC m=+156.866322712" watchObservedRunningTime="2026-01-02 00:09:42.119395113 +0000 UTC m=+156.917882267" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.118919 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.119186 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:42.619173277 +0000 UTC m=+157.417660431 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.135813 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wvh2t"] Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.136625 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" podUID="afef674f-42ec-4083-ba3c-803546393a46" containerName="controller-manager" containerID="cri-o://8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4" gracePeriod=30 Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.166325 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-6ldzt" podStartSLOduration=136.166307912 podStartE2EDuration="2m16.166307912s" podCreationTimestamp="2026-01-02 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:42.165085871 +0000 UTC m=+156.963573035" watchObservedRunningTime="2026-01-02 00:09:42.166307912 +0000 UTC m=+156.964795066" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.206831 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" podStartSLOduration=137.206812239 podStartE2EDuration="2m17.206812239s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:42.205503456 +0000 UTC m=+157.003990640" watchObservedRunningTime="2026-01-02 00:09:42.206812239 +0000 UTC m=+157.005299393" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.221972 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.222437 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:42.722420561 +0000 UTC m=+157.520907715 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.261685 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-s6kgk" podStartSLOduration=136.261670397 podStartE2EDuration="2m16.261670397s" podCreationTimestamp="2026-01-02 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:42.226984885 +0000 UTC m=+157.025472039" watchObservedRunningTime="2026-01-02 00:09:42.261670397 +0000 UTC m=+157.060157551" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.302516 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-m8p7l" podStartSLOduration=137.302495023 podStartE2EDuration="2m17.302495023s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:42.302425891 +0000 UTC m=+157.100913055" watchObservedRunningTime="2026-01-02 00:09:42.302495023 +0000 UTC m=+157.100982177" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.323615 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.324168 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:42 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:42 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:42 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.324200 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:42.824180778 +0000 UTC m=+157.622667932 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.324418 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.349494 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2srpw" podStartSLOduration=137.349467313 podStartE2EDuration="2m17.349467313s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:42.337665037 +0000 UTC m=+157.136152201" watchObservedRunningTime="2026-01-02 00:09:42.349467313 +0000 UTC m=+157.147954467" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.425072 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.425890 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:42.925869102 +0000 UTC m=+157.724356256 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.527032 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.527371 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.027354982 +0000 UTC m=+157.825842136 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.628205 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.628434 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.128408141 +0000 UTC m=+157.926895295 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.628610 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.628923 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.128912044 +0000 UTC m=+157.927399198 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.729806 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.730061 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.230042024 +0000 UTC m=+158.028529178 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.730369 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.730767 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.230750382 +0000 UTC m=+158.029237536 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.765728 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.831254 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-02 00:09:43.331218686 +0000 UTC m=+158.129705840 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.831148 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.831514 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.831859 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.331832301 +0000 UTC m=+158.130319455 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.933341 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f64cs\" (UniqueName: \"kubernetes.io/projected/afef674f-42ec-4083-ba3c-803546393a46-kube-api-access-f64cs\") pod \"afef674f-42ec-4083-ba3c-803546393a46\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.933929 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-config\") pod \"afef674f-42ec-4083-ba3c-803546393a46\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.934025 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-client-ca\") pod \"afef674f-42ec-4083-ba3c-803546393a46\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.934199 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.934297 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afef674f-42ec-4083-ba3c-803546393a46-serving-cert\") pod \"afef674f-42ec-4083-ba3c-803546393a46\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.934383 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-proxy-ca-bundles\") pod \"afef674f-42ec-4083-ba3c-803546393a46\" (UID: \"afef674f-42ec-4083-ba3c-803546393a46\") " Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.935026 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-client-ca" (OuterVolumeSpecName: "client-ca") pod "afef674f-42ec-4083-ba3c-803546393a46" (UID: "afef674f-42ec-4083-ba3c-803546393a46"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.935205 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "afef674f-42ec-4083-ba3c-803546393a46" (UID: "afef674f-42ec-4083-ba3c-803546393a46"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.935327 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.435312271 +0000 UTC m=+158.233799425 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.935412 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-config" (OuterVolumeSpecName: "config") pod "afef674f-42ec-4083-ba3c-803546393a46" (UID: "afef674f-42ec-4083-ba3c-803546393a46"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.942541 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afef674f-42ec-4083-ba3c-803546393a46-kube-api-access-f64cs" (OuterVolumeSpecName: "kube-api-access-f64cs") pod "afef674f-42ec-4083-ba3c-803546393a46" (UID: "afef674f-42ec-4083-ba3c-803546393a46"). InnerVolumeSpecName "kube-api-access-f64cs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.942963 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afef674f-42ec-4083-ba3c-803546393a46-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "afef674f-42ec-4083-ba3c-803546393a46" (UID: "afef674f-42ec-4083-ba3c-803546393a46"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.946481 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-gsbjs" event={"ID":"f8bcffb8-8576-44bf-a116-5a19996a48a8","Type":"ContainerStarted","Data":"8aaa7ae967008262c2cf48ed3cf86cda61df819ab1d6613fdd3fe1947feed09c"} Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.948542 4766 generic.go:334] "Generic (PLEG): container finished" podID="afef674f-42ec-4083-ba3c-803546393a46" containerID="8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4" exitCode=0 Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.948591 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" event={"ID":"afef674f-42ec-4083-ba3c-803546393a46","Type":"ContainerDied","Data":"8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4"} Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.948610 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" event={"ID":"afef674f-42ec-4083-ba3c-803546393a46","Type":"ContainerDied","Data":"6d8be454daa89fc65a0fd507c748e28773778222c1556b656c41b29869fa9aa6"} Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.948626 4766 scope.go:117] "RemoveContainer" containerID="8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.948732 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wvh2t" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.950943 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" event={"ID":"add97db2-55ef-4d2b-89d3-32453f54579a","Type":"ContainerStarted","Data":"5cab1d2614effb42cc3498f842ee185d499d347059089c922029a1c1fbd39f7e"} Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.952751 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" event={"ID":"38fd421f-b7a6-4b8a-a623-575d28d65212","Type":"ContainerStarted","Data":"0ded45250b8e4fd8d9d15b4629d99b757342ebf0634b37db2d6f53a6c73fd98d"} Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.952979 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.961505 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xldzf" event={"ID":"933f7496-5243-4054-a3cb-e2383d7ac0ac","Type":"ContainerStarted","Data":"fc43858cc10e1c6264b98be963eb7c5000abfa7818cf315034da6ca995e1e9b4"} Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.962298 4766 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-5j85l container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body= Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.962344 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" podUID="16d8672b-0a81-48e3-bbda-d4f2ee461924" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.963252 4766 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-wkgpj container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body= Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.963323 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" podUID="0da67e94-91c7-479b-9c5c-a3044ea1a7be" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.972433 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" podStartSLOduration=137.972417054 podStartE2EDuration="2m17.972417054s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:42.970180727 +0000 UTC m=+157.768667901" watchObservedRunningTime="2026-01-02 00:09:42.972417054 +0000 UTC m=+157.770904198" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.993244 4766 scope.go:117] "RemoveContainer" 
containerID="8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4" Jan 02 00:09:42 crc kubenswrapper[4766]: E0102 00:09:42.993713 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4\": container with ID starting with 8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4 not found: ID does not exist" containerID="8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.993755 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4"} err="failed to get container status \"8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4\": rpc error: code = NotFound desc = could not find container \"8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4\": container with ID starting with 8dcb1c3396348c4b271dbf1ecc11176f3052b10eb7c6304935421a6bec203dd4 not found: ID does not exist" Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.994555 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wvh2t"] Jan 02 00:09:42 crc kubenswrapper[4766]: I0102 00:09:42.998637 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wvh2t"] Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.035906 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.036026 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afef674f-42ec-4083-ba3c-803546393a46-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.036040 4766 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.036049 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f64cs\" (UniqueName: \"kubernetes.io/projected/afef674f-42ec-4083-ba3c-803546393a46-kube-api-access-f64cs\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.036057 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.036065 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/afef674f-42ec-4083-ba3c-803546393a46-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.037964 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-02 00:09:43.53794656 +0000 UTC m=+158.336433804 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.137418 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.137597 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.637573283 +0000 UTC m=+158.436060437 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.138092 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.139124 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.639108931 +0000 UTC m=+158.437596175 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.166093 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.239089 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.239522 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.739479623 +0000 UTC m=+158.537966777 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.239732 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.240234 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.740223101 +0000 UTC m=+158.538710335 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.322033 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:43 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:43 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:43 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.322336 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.341323 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.341527 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.841476385 +0000 UTC m=+158.639963539 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.341753 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.342201 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.842183773 +0000 UTC m=+158.640670927 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.443537 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.443885 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.943857608 +0000 UTC m=+158.742344762 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.444054 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.444544 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:43.944535664 +0000 UTC m=+158.743022818 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.540384 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afef674f-42ec-4083-ba3c-803546393a46" path="/var/lib/kubelet/pods/afef674f-42ec-4083-ba3c-803546393a46/volumes" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.545713 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.545870 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.045836939 +0000 UTC m=+158.844324093 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.545938 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.546285 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.04627368 +0000 UTC m=+158.844760834 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.574024 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qgsrr"] Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.574259 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afef674f-42ec-4083-ba3c-803546393a46" containerName="controller-manager" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.574278 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="afef674f-42ec-4083-ba3c-803546393a46" containerName="controller-manager" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.574400 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="afef674f-42ec-4083-ba3c-803546393a46" containerName="controller-manager" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.574795 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.575992 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.576725 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.576920 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.578639 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.579209 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.595864 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.599319 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.606216 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qgsrr"] Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.647253 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.647697 4766 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.147674588 +0000 UTC m=+158.946161742 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.647986 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.648132 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xs65l\" (UniqueName: \"kubernetes.io/projected/5ea96e94-99f0-443b-aec1-f500a377a705-kube-api-access-xs65l\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.648271 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.648360 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.148344725 +0000 UTC m=+158.946831879 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.648517 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-config\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.648614 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ea96e94-99f0-443b-aec1-f500a377a705-serving-cert\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.648807 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-client-ca\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.738678 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.739428 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.743540 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.743986 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.750328 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.750672 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xs65l\" (UniqueName: \"kubernetes.io/projected/5ea96e94-99f0-443b-aec1-f500a377a705-kube-api-access-xs65l\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.750734 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.750798 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-config\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.750819 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ea96e94-99f0-443b-aec1-f500a377a705-serving-cert\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.753428 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.753490 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.25204454 +0000 UTC m=+159.050531704 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.753642 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-client-ca\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.754184 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-config\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.754654 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-client-ca\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.767555 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ea96e94-99f0-443b-aec1-f500a377a705-serving-cert\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.772217 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.810858 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xs65l\" (UniqueName: \"kubernetes.io/projected/5ea96e94-99f0-443b-aec1-f500a377a705-kube-api-access-xs65l\") pod \"controller-manager-879f6c89f-qgsrr\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.855488 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.855834 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/609454d8-b199-4188-8659-5f669b795f79-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"609454d8-b199-4188-8659-5f669b795f79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 02 00:09:43 crc 
kubenswrapper[4766]: E0102 00:09:43.855990 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.355966541 +0000 UTC m=+159.154453755 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.856090 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/609454d8-b199-4188-8659-5f669b795f79-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"609454d8-b199-4188-8659-5f669b795f79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.897758 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.905648 4766 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.957302 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.957435 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.457406919 +0000 UTC m=+159.255894073 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.957579 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.957608 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/609454d8-b199-4188-8659-5f669b795f79-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"609454d8-b199-4188-8659-5f669b795f79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.957646 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/609454d8-b199-4188-8659-5f669b795f79-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"609454d8-b199-4188-8659-5f669b795f79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 02 00:09:43 crc kubenswrapper[4766]: E0102 00:09:43.958120 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.458113307 +0000 UTC m=+159.256600461 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.958249 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/609454d8-b199-4188-8659-5f669b795f79-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"609454d8-b199-4188-8659-5f669b795f79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.990447 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" event={"ID":"add97db2-55ef-4d2b-89d3-32453f54579a","Type":"ContainerStarted","Data":"e2d61b857b3ae21a86329e3bd7e014fefa4e0ee47dd4f14d79d0849947d17aaa"} Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.990482 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" event={"ID":"add97db2-55ef-4d2b-89d3-32453f54579a","Type":"ContainerStarted","Data":"bcac53947f1ffc99e97eb6b08956c1249a72ace80b2ffa402ac4cdb09c8c3259"} Jan 02 00:09:43 crc kubenswrapper[4766]: I0102 00:09:43.997837 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/609454d8-b199-4188-8659-5f669b795f79-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"609454d8-b199-4188-8659-5f669b795f79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.000566 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wkgpj" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.058489 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:44 crc kubenswrapper[4766]: E0102 00:09:44.058784 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.558767736 +0000 UTC m=+159.357254890 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.067894 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.159584 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:44 crc kubenswrapper[4766]: E0102 00:09:44.161206 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.661188529 +0000 UTC m=+159.459675683 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.237466 4766 patch_prober.go:28] interesting pod/downloads-7954f5f757-8bc2w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.237733 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8bc2w" podUID="509add4d-67be-4dff-8194-55516bc7bbd7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.238129 4766 patch_prober.go:28] interesting pod/downloads-7954f5f757-8bc2w container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.238148 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-8bc2w" podUID="509add4d-67be-4dff-8194-55516bc7bbd7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.260772 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:44 crc kubenswrapper[4766]: E0102 00:09:44.261148 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.761136029 +0000 UTC m=+159.559623183 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.263159 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-s5ggw" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.301705 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.301749 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.314906 4766 patch_prober.go:28] interesting pod/console-f9d7485db-482gx container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.315024 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-482gx" podUID="bb03fd80-3959-4cea-9739-3580d6310a64" containerName="console" probeResult="failure" output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.321125 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.331394 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:44 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:44 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:44 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.331446 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.362537 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:44 crc kubenswrapper[4766]: E0102 00:09:44.363905 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.863893652 +0000 UTC m=+159.662380806 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.389947 4766 patch_prober.go:28] interesting pod/apiserver-76f77b778f-cnd4q container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]log ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]etcd ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]poststarthook/generic-apiserver-start-informers ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]poststarthook/max-in-flight-filter ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]poststarthook/image.openshift.io-apiserver-caches ok Jan 02 00:09:44 crc kubenswrapper[4766]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Jan 02 00:09:44 crc kubenswrapper[4766]: [+]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]poststarthook/project.openshift.io-projectcache ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]poststarthook/openshift.io-startinformers ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]poststarthook/openshift.io-restmapperupdater ok Jan 02 00:09:44 crc kubenswrapper[4766]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 02 00:09:44 crc kubenswrapper[4766]: livez check failed Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.390013 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" podUID="f2ee1c3e-e76f-4a7b-b4bd-d74c526ed361" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.451032 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qgsrr"] Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.463835 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:44 crc kubenswrapper[4766]: E0102 00:09:44.465294 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:44.965274108 +0000 UTC m=+159.763761262 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.566198 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:44 crc kubenswrapper[4766]: E0102 00:09:44.566604 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:45.066590173 +0000 UTC m=+159.865077327 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.598923 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.667216 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:44 crc kubenswrapper[4766]: E0102 00:09:44.667577 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-02 00:09:45.167559321 +0000 UTC m=+159.966046475 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.768324 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:44 crc kubenswrapper[4766]: E0102 00:09:44.768723 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-02 00:09:45.268710342 +0000 UTC m=+160.067197486 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krqht" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.791953 4766 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-02T00:09:43.905675739Z","Handler":null,"Name":""} Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.797555 4766 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.797597 4766 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.869853 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.898531 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.971151 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.987472 4766 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 02 00:09:44 crc kubenswrapper[4766]: I0102 00:09:44.987509 4766 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.005961 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" event={"ID":"5ea96e94-99f0-443b-aec1-f500a377a705","Type":"ContainerStarted","Data":"9552f1bf449e24c01e52fe0f2788ea7b73675b134f2610735e1e0b0cdccd9e80"} Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.006016 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" event={"ID":"5ea96e94-99f0-443b-aec1-f500a377a705","Type":"ContainerStarted","Data":"ca2c4bb45bb7e2b6432d7009a56e169c95c7936bc758d6fa812a02997a6a3e12"} Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.006498 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.008570 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"609454d8-b199-4188-8659-5f669b795f79","Type":"ContainerStarted","Data":"0eac7fc0d34a7345ec147332f222432c51e8c065789a9c178dbfd0ae4eea6d53"} Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.014055 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krqht\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.014684 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.021875 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" event={"ID":"add97db2-55ef-4d2b-89d3-32453f54579a","Type":"ContainerStarted","Data":"6681bdea030630a1b0bb576043dde8ef16d83b6c8ef9d321556f6e504d476ba8"} Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 
00:09:45.028244 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" podStartSLOduration=3.028216941 podStartE2EDuration="3.028216941s" podCreationTimestamp="2026-01-02 00:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:45.027399121 +0000 UTC m=+159.825886275" watchObservedRunningTime="2026-01-02 00:09:45.028216941 +0000 UTC m=+159.826704095" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.059125 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4pfq5"] Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.060036 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.062414 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.082204 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4pfq5"] Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.083684 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-wjtkd" podStartSLOduration=13.083674204 podStartE2EDuration="13.083674204s" podCreationTimestamp="2026-01-02 00:09:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:45.082796212 +0000 UTC m=+159.881283366" watchObservedRunningTime="2026-01-02 00:09:45.083674204 +0000 UTC m=+159.882161358" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.175663 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.181017 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-catalog-content\") pod \"certified-operators-4pfq5\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.181053 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvrpt\" (UniqueName: \"kubernetes.io/projected/52a3f025-cab4-4df2-851c-a3144fc4e9de-kube-api-access-kvrpt\") pod \"certified-operators-4pfq5\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.181077 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-utilities\") pod \"certified-operators-4pfq5\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.282540 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-catalog-content\") pod \"certified-operators-4pfq5\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.282576 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvrpt\" (UniqueName: \"kubernetes.io/projected/52a3f025-cab4-4df2-851c-a3144fc4e9de-kube-api-access-kvrpt\") pod \"certified-operators-4pfq5\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.282598 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-utilities\") pod \"certified-operators-4pfq5\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.283059 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-utilities\") pod \"certified-operators-4pfq5\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.283264 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-catalog-content\") pod \"certified-operators-4pfq5\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.283554 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c457j"] Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.285387 4766 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c457j" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.288947 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.293579 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c457j"] Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.303586 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvrpt\" (UniqueName: \"kubernetes.io/projected/52a3f025-cab4-4df2-851c-a3144fc4e9de-kube-api-access-kvrpt\") pod \"certified-operators-4pfq5\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.320702 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:45 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:45 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:45 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.320787 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.345244 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.380892 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5j85l" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.383988 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-catalog-content\") pod \"community-operators-c457j\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " pod="openshift-marketplace/community-operators-c457j" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.384042 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-utilities\") pod \"community-operators-c457j\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " pod="openshift-marketplace/community-operators-c457j" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.384094 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8ppf\" (UniqueName: \"kubernetes.io/projected/0c1f7ccd-9415-4560-968d-f20c5c44df58-kube-api-access-m8ppf\") pod \"community-operators-c457j\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " pod="openshift-marketplace/community-operators-c457j" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.413540 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.455861 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7prh8"] Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.457714 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.467087 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7prh8"] Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.487010 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-catalog-content\") pod \"community-operators-c457j\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " pod="openshift-marketplace/community-operators-c457j" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.487094 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-utilities\") pod \"community-operators-c457j\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " pod="openshift-marketplace/community-operators-c457j" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.487152 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8ppf\" (UniqueName: \"kubernetes.io/projected/0c1f7ccd-9415-4560-968d-f20c5c44df58-kube-api-access-m8ppf\") pod \"community-operators-c457j\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " pod="openshift-marketplace/community-operators-c457j" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.488990 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-catalog-content\") pod \"community-operators-c457j\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " pod="openshift-marketplace/community-operators-c457j" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.496720 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-utilities\") pod \"community-operators-c457j\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " pod="openshift-marketplace/community-operators-c457j" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.526424 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8ppf\" (UniqueName: \"kubernetes.io/projected/0c1f7ccd-9415-4560-968d-f20c5c44df58-kube-api-access-m8ppf\") pod \"community-operators-c457j\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " pod="openshift-marketplace/community-operators-c457j" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.549555 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.589639 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s948f\" (UniqueName: \"kubernetes.io/projected/2298405f-0595-45a3-9586-7ab0c713531b-kube-api-access-s948f\") pod \"certified-operators-7prh8\" (UID: 
\"2298405f-0595-45a3-9586-7ab0c713531b\") " pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.589705 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-catalog-content\") pod \"certified-operators-7prh8\" (UID: \"2298405f-0595-45a3-9586-7ab0c713531b\") " pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.589759 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-utilities\") pod \"certified-operators-7prh8\" (UID: \"2298405f-0595-45a3-9586-7ab0c713531b\") " pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.603980 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krqht"] Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.605594 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c457j" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.661953 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-r2cdb"] Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.666331 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.668416 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r2cdb"] Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.690859 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-utilities\") pod \"certified-operators-7prh8\" (UID: \"2298405f-0595-45a3-9586-7ab0c713531b\") " pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.690945 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s948f\" (UniqueName: \"kubernetes.io/projected/2298405f-0595-45a3-9586-7ab0c713531b-kube-api-access-s948f\") pod \"certified-operators-7prh8\" (UID: \"2298405f-0595-45a3-9586-7ab0c713531b\") " pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.690965 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-catalog-content\") pod \"certified-operators-7prh8\" (UID: \"2298405f-0595-45a3-9586-7ab0c713531b\") " pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.691355 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-utilities\") pod \"certified-operators-7prh8\" (UID: \"2298405f-0595-45a3-9586-7ab0c713531b\") " pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.695172 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-catalog-content\") pod \"certified-operators-7prh8\" (UID: \"2298405f-0595-45a3-9586-7ab0c713531b\") " pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.714477 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s948f\" (UniqueName: \"kubernetes.io/projected/2298405f-0595-45a3-9586-7ab0c713531b-kube-api-access-s948f\") pod \"certified-operators-7prh8\" (UID: \"2298405f-0595-45a3-9586-7ab0c713531b\") " pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.793604 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-utilities\") pod \"community-operators-r2cdb\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.793708 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr74q\" (UniqueName: \"kubernetes.io/projected/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-kube-api-access-lr74q\") pod \"community-operators-r2cdb\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.793743 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-catalog-content\") pod \"community-operators-r2cdb\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.822284 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.823676 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4pfq5"] Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.898495 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-utilities\") pod \"community-operators-r2cdb\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.898614 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr74q\" (UniqueName: \"kubernetes.io/projected/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-kube-api-access-lr74q\") pod \"community-operators-r2cdb\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.898646 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-catalog-content\") pod \"community-operators-r2cdb\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.899163 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-catalog-content\") pod \"community-operators-r2cdb\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.899419 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-utilities\") pod \"community-operators-r2cdb\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.901302 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c457j"] Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.922661 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr74q\" (UniqueName: \"kubernetes.io/projected/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-kube-api-access-lr74q\") pod \"community-operators-r2cdb\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:09:45 crc kubenswrapper[4766]: I0102 00:09:45.985537 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:09:46 crc kubenswrapper[4766]: I0102 00:09:46.026398 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pfq5" event={"ID":"52a3f025-cab4-4df2-851c-a3144fc4e9de","Type":"ContainerStarted","Data":"32272cdc0c39b98cb6562b6e1cd2e3d0ac9623074dd539241a307e0a13022caf"} Jan 02 00:09:46 crc kubenswrapper[4766]: I0102 00:09:46.027445 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c457j" event={"ID":"0c1f7ccd-9415-4560-968d-f20c5c44df58","Type":"ContainerStarted","Data":"dd6aaafbd47ed961bb9ce9c91f9b1e3b1e9c57d010a1a57dc05b41ab6e9589c9"} Jan 02 00:09:46 crc kubenswrapper[4766]: I0102 00:09:46.028596 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"609454d8-b199-4188-8659-5f669b795f79","Type":"ContainerStarted","Data":"f54f401b21a510ce6b22e067758548901204144fd8d3982c99657e6cbb4e2a54"} Jan 02 00:09:46 crc kubenswrapper[4766]: I0102 00:09:46.029528 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" event={"ID":"bbc14cde-7795-4cb5-a9e0-0c2500835f44","Type":"ContainerStarted","Data":"68dd47fea876ee32606375dc69a3ed2aeeb2eb4e94afea126ea2fe16c7940a59"} Jan 02 00:09:46 crc kubenswrapper[4766]: I0102 00:09:46.147940 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7prh8"] Jan 02 00:09:46 crc kubenswrapper[4766]: W0102 00:09:46.157108 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2298405f_0595_45a3_9586_7ab0c713531b.slice/crio-29236b6a2a3a6d265eef7e4590c1a392666f7f2c96a12acfb11896a3acc9f007 WatchSource:0}: Error finding container 29236b6a2a3a6d265eef7e4590c1a392666f7f2c96a12acfb11896a3acc9f007: Status 404 returned error can't find the container with id 29236b6a2a3a6d265eef7e4590c1a392666f7f2c96a12acfb11896a3acc9f007 Jan 02 00:09:46 crc kubenswrapper[4766]: I0102 00:09:46.235396 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r2cdb"] Jan 02 00:09:46 crc kubenswrapper[4766]: W0102 00:09:46.251373 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd04a5cef_a04c_4451_9b0f_ba9dea4877f2.slice/crio-b5aca1d39e3cdd8cbbf2c1eca98f58ea4e164419d6918f3834035cb39b572067 WatchSource:0}: Error finding container b5aca1d39e3cdd8cbbf2c1eca98f58ea4e164419d6918f3834035cb39b572067: Status 404 returned error can't find the container with id b5aca1d39e3cdd8cbbf2c1eca98f58ea4e164419d6918f3834035cb39b572067 Jan 02 00:09:46 crc kubenswrapper[4766]: I0102 00:09:46.322955 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:46 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:46 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:46 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:46 crc kubenswrapper[4766]: I0102 00:09:46.323012 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" 
containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.036514 4766 generic.go:334] "Generic (PLEG): container finished" podID="0c1f7ccd-9415-4560-968d-f20c5c44df58" containerID="74b7c8b8d7a4af366ca427acc34a769a7581e37ffec6379bd531d9b7016a6c1b" exitCode=0 Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.036674 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c457j" event={"ID":"0c1f7ccd-9415-4560-968d-f20c5c44df58","Type":"ContainerDied","Data":"74b7c8b8d7a4af366ca427acc34a769a7581e37ffec6379bd531d9b7016a6c1b"} Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.038260 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2cdb" event={"ID":"d04a5cef-a04c-4451-9b0f-ba9dea4877f2","Type":"ContainerStarted","Data":"e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd"} Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.038291 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2cdb" event={"ID":"d04a5cef-a04c-4451-9b0f-ba9dea4877f2","Type":"ContainerStarted","Data":"b5aca1d39e3cdd8cbbf2c1eca98f58ea4e164419d6918f3834035cb39b572067"} Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.039579 4766 generic.go:334] "Generic (PLEG): container finished" podID="49a72ec1-ed97-4bc9-947b-1df688c6676d" containerID="00d9e6f95bbe9f24ced66da0d4c53dc5bddf495d4d59f6a05eba8ba0dfa768a0" exitCode=0 Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.039653 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" event={"ID":"49a72ec1-ed97-4bc9-947b-1df688c6676d","Type":"ContainerDied","Data":"00d9e6f95bbe9f24ced66da0d4c53dc5bddf495d4d59f6a05eba8ba0dfa768a0"} Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.041184 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" event={"ID":"bbc14cde-7795-4cb5-a9e0-0c2500835f44","Type":"ContainerStarted","Data":"f1fbf4730b1ec3252c2624a174674dec13b28efec7d58fc775108f67882b56cc"} Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.043964 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7prh8" event={"ID":"2298405f-0595-45a3-9586-7ab0c713531b","Type":"ContainerStarted","Data":"9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017"} Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.043993 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7prh8" event={"ID":"2298405f-0595-45a3-9586-7ab0c713531b","Type":"ContainerStarted","Data":"29236b6a2a3a6d265eef7e4590c1a392666f7f2c96a12acfb11896a3acc9f007"} Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.045630 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pfq5" event={"ID":"52a3f025-cab4-4df2-851c-a3144fc4e9de","Type":"ContainerStarted","Data":"3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a"} Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.091610 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=4.091592788 podStartE2EDuration="4.091592788s" podCreationTimestamp="2026-01-02 00:09:43 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:47.090158842 +0000 UTC m=+161.888646016" watchObservedRunningTime="2026-01-02 00:09:47.091592788 +0000 UTC m=+161.890079942" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.260023 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kmkx9"] Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.261464 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.263234 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.274960 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kmkx9"] Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.319990 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:47 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:47 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:47 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.320070 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.418728 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8sf6\" (UniqueName: \"kubernetes.io/projected/c5f043c1-da6b-433d-8db2-47b8d31ab74c-kube-api-access-l8sf6\") pod \"redhat-marketplace-kmkx9\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.418922 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-catalog-content\") pod \"redhat-marketplace-kmkx9\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.418980 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-utilities\") pod \"redhat-marketplace-kmkx9\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.520660 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8sf6\" (UniqueName: \"kubernetes.io/projected/c5f043c1-da6b-433d-8db2-47b8d31ab74c-kube-api-access-l8sf6\") pod \"redhat-marketplace-kmkx9\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.520788 4766 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-catalog-content\") pod \"redhat-marketplace-kmkx9\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.520825 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-utilities\") pod \"redhat-marketplace-kmkx9\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.521403 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-catalog-content\") pod \"redhat-marketplace-kmkx9\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.521470 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-utilities\") pod \"redhat-marketplace-kmkx9\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.546297 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8sf6\" (UniqueName: \"kubernetes.io/projected/c5f043c1-da6b-433d-8db2-47b8d31ab74c-kube-api-access-l8sf6\") pod \"redhat-marketplace-kmkx9\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.574781 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.653722 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b76n6"] Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.654968 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.671587 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b76n6"] Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.824239 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-catalog-content\") pod \"redhat-marketplace-b76n6\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.824345 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-utilities\") pod \"redhat-marketplace-b76n6\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.824398 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdd79\" (UniqueName: \"kubernetes.io/projected/296488e0-7f00-413d-87fb-d8cecb1b186c-kube-api-access-bdd79\") pod \"redhat-marketplace-b76n6\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.926065 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-utilities\") pod \"redhat-marketplace-b76n6\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.926178 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdd79\" (UniqueName: \"kubernetes.io/projected/296488e0-7f00-413d-87fb-d8cecb1b186c-kube-api-access-bdd79\") pod \"redhat-marketplace-b76n6\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.926285 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-catalog-content\") pod \"redhat-marketplace-b76n6\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.927413 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-utilities\") pod \"redhat-marketplace-b76n6\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.927485 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-catalog-content\") pod \"redhat-marketplace-b76n6\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.951058 4766 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-bdd79\" (UniqueName: \"kubernetes.io/projected/296488e0-7f00-413d-87fb-d8cecb1b186c-kube-api-access-bdd79\") pod \"redhat-marketplace-b76n6\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:09:47 crc kubenswrapper[4766]: I0102 00:09:47.983983 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.082089 4766 generic.go:334] "Generic (PLEG): container finished" podID="2298405f-0595-45a3-9586-7ab0c713531b" containerID="9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017" exitCode=0 Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.082720 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7prh8" event={"ID":"2298405f-0595-45a3-9586-7ab0c713531b","Type":"ContainerDied","Data":"9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017"} Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.088341 4766 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.090072 4766 generic.go:334] "Generic (PLEG): container finished" podID="52a3f025-cab4-4df2-851c-a3144fc4e9de" containerID="3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a" exitCode=0 Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.090143 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pfq5" event={"ID":"52a3f025-cab4-4df2-851c-a3144fc4e9de","Type":"ContainerDied","Data":"3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a"} Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.100370 4766 generic.go:334] "Generic (PLEG): container finished" podID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" containerID="e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd" exitCode=0 Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.101073 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2cdb" event={"ID":"d04a5cef-a04c-4451-9b0f-ba9dea4877f2","Type":"ContainerDied","Data":"e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd"} Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.101145 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.173524 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" podStartSLOduration=143.173499908 podStartE2EDuration="2m23.173499908s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:48.170959264 +0000 UTC m=+162.969446428" watchObservedRunningTime="2026-01-02 00:09:48.173499908 +0000 UTC m=+162.971987062" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.260103 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-txcw7"] Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.261886 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.265538 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.286750 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kmkx9"] Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.290370 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-txcw7"] Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.320976 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:48 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:48 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:48 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.321320 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.333904 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-catalog-content\") pod \"redhat-operators-txcw7\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.333946 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptd2x\" (UniqueName: \"kubernetes.io/projected/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-kube-api-access-ptd2x\") pod \"redhat-operators-txcw7\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.334002 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-utilities\") pod \"redhat-operators-txcw7\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.435076 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-utilities\") pod \"redhat-operators-txcw7\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.435233 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-catalog-content\") pod \"redhat-operators-txcw7\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.435270 4766 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ptd2x\" (UniqueName: \"kubernetes.io/projected/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-kube-api-access-ptd2x\") pod \"redhat-operators-txcw7\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.435619 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-catalog-content\") pod \"redhat-operators-txcw7\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.435616 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-utilities\") pod \"redhat-operators-txcw7\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.458647 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.461618 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptd2x\" (UniqueName: \"kubernetes.io/projected/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-kube-api-access-ptd2x\") pod \"redhat-operators-txcw7\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.536059 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh449\" (UniqueName: \"kubernetes.io/projected/49a72ec1-ed97-4bc9-947b-1df688c6676d-kube-api-access-gh449\") pod \"49a72ec1-ed97-4bc9-947b-1df688c6676d\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.536154 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/49a72ec1-ed97-4bc9-947b-1df688c6676d-config-volume\") pod \"49a72ec1-ed97-4bc9-947b-1df688c6676d\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.536220 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/49a72ec1-ed97-4bc9-947b-1df688c6676d-secret-volume\") pod \"49a72ec1-ed97-4bc9-947b-1df688c6676d\" (UID: \"49a72ec1-ed97-4bc9-947b-1df688c6676d\") " Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.536390 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.536858 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49a72ec1-ed97-4bc9-947b-1df688c6676d-config-volume" (OuterVolumeSpecName: "config-volume") pod "49a72ec1-ed97-4bc9-947b-1df688c6676d" (UID: "49a72ec1-ed97-4bc9-947b-1df688c6676d"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.540620 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b76n6"] Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.549674 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49a72ec1-ed97-4bc9-947b-1df688c6676d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "49a72ec1-ed97-4bc9-947b-1df688c6676d" (UID: "49a72ec1-ed97-4bc9-947b-1df688c6676d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.550467 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49a72ec1-ed97-4bc9-947b-1df688c6676d-kube-api-access-gh449" (OuterVolumeSpecName: "kube-api-access-gh449") pod "49a72ec1-ed97-4bc9-947b-1df688c6676d" (UID: "49a72ec1-ed97-4bc9-947b-1df688c6676d"). InnerVolumeSpecName "kube-api-access-gh449". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.551923 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f869b527-abf2-400f-90c2-ebee595564a2-metrics-certs\") pod \"network-metrics-daemon-zd2t8\" (UID: \"f869b527-abf2-400f-90c2-ebee595564a2\") " pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:48 crc kubenswrapper[4766]: W0102 00:09:48.554634 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod296488e0_7f00_413d_87fb_d8cecb1b186c.slice/crio-26b35f1adb2f61ae6af26bfb2553dacdc842142ddd5dd96478e903e4619e1d78 WatchSource:0}: Error finding container 26b35f1adb2f61ae6af26bfb2553dacdc842142ddd5dd96478e903e4619e1d78: Status 404 returned error can't find the container with id 26b35f1adb2f61ae6af26bfb2553dacdc842142ddd5dd96478e903e4619e1d78 Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.577999 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.637681 4766 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/49a72ec1-ed97-4bc9-947b-1df688c6676d-config-volume\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.637701 4766 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/49a72ec1-ed97-4bc9-947b-1df688c6676d-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.637710 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh449\" (UniqueName: \"kubernetes.io/projected/49a72ec1-ed97-4bc9-947b-1df688c6676d-kube-api-access-gh449\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.652322 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bjfhq"] Jan 02 00:09:48 crc kubenswrapper[4766]: E0102 00:09:48.652871 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49a72ec1-ed97-4bc9-947b-1df688c6676d" containerName="collect-profiles" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.652890 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="49a72ec1-ed97-4bc9-947b-1df688c6676d" containerName="collect-profiles" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.653019 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="49a72ec1-ed97-4bc9-947b-1df688c6676d" containerName="collect-profiles" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.658527 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.660624 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.661283 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.663494 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.664511 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.701628 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.701676 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bjfhq"] Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.738965 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-catalog-content\") pod \"redhat-operators-bjfhq\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.738999 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9j2q\" (UniqueName: \"kubernetes.io/projected/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-kube-api-access-g9j2q\") pod \"redhat-operators-bjfhq\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.739027 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.739054 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-utilities\") pod \"redhat-operators-bjfhq\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.739070 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.772142 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zd2t8" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.840047 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-catalog-content\") pod \"redhat-operators-bjfhq\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.840097 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9j2q\" (UniqueName: \"kubernetes.io/projected/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-kube-api-access-g9j2q\") pod \"redhat-operators-bjfhq\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.840124 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.840152 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-utilities\") pod \"redhat-operators-bjfhq\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.840169 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.840328 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.840721 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-catalog-content\") pod \"redhat-operators-bjfhq\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.841562 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-utilities\") pod \"redhat-operators-bjfhq\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.868570 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 02 
00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.873071 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9j2q\" (UniqueName: \"kubernetes.io/projected/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-kube-api-access-g9j2q\") pod \"redhat-operators-bjfhq\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:09:48 crc kubenswrapper[4766]: I0102 00:09:48.917780 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-txcw7"] Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.022264 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.046400 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.069515 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zd2t8"] Jan 02 00:09:49 crc kubenswrapper[4766]: W0102 00:09:49.088172 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf869b527_abf2_400f_90c2_ebee595564a2.slice/crio-babce34771b0abd3ace8b0b843d88c8302b204ab4ca56221f532f79e23e750fe WatchSource:0}: Error finding container babce34771b0abd3ace8b0b843d88c8302b204ab4ca56221f532f79e23e750fe: Status 404 returned error can't find the container with id babce34771b0abd3ace8b0b843d88c8302b204ab4ca56221f532f79e23e750fe Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.127478 4766 generic.go:334] "Generic (PLEG): container finished" podID="296488e0-7f00-413d-87fb-d8cecb1b186c" containerID="a2c853298d984527b6b9eaf7e0a632c2918be72eac7b8dbfbf0db880a626844f" exitCode=0 Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.128189 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b76n6" event={"ID":"296488e0-7f00-413d-87fb-d8cecb1b186c","Type":"ContainerDied","Data":"a2c853298d984527b6b9eaf7e0a632c2918be72eac7b8dbfbf0db880a626844f"} Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.128215 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b76n6" event={"ID":"296488e0-7f00-413d-87fb-d8cecb1b186c","Type":"ContainerStarted","Data":"26b35f1adb2f61ae6af26bfb2553dacdc842142ddd5dd96478e903e4619e1d78"} Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.130909 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" event={"ID":"49a72ec1-ed97-4bc9-947b-1df688c6676d","Type":"ContainerDied","Data":"cbecda82a0c69c935897745d00b8815899227c5bfe1029091ac8a39494dd4436"} Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.130933 4766 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbecda82a0c69c935897745d00b8815899227c5bfe1029091ac8a39494dd4436" Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.131010 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29455200-4bwhv" Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.132401 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" event={"ID":"f869b527-abf2-400f-90c2-ebee595564a2","Type":"ContainerStarted","Data":"babce34771b0abd3ace8b0b843d88c8302b204ab4ca56221f532f79e23e750fe"} Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.133594 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txcw7" event={"ID":"ed7d2a87-a727-4174-ac4d-746b7d7a29fd","Type":"ContainerStarted","Data":"3f67b829109217e7e868ff876b93b9ca5047f51d2fd5370941c468da7bc0ca8d"} Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.134922 4766 generic.go:334] "Generic (PLEG): container finished" podID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" containerID="f6461bc691570b6e6c4125bf5039f74afcdf0bf920fdf22d4ff4fd1c4972c5cf" exitCode=0 Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.134975 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kmkx9" event={"ID":"c5f043c1-da6b-433d-8db2-47b8d31ab74c","Type":"ContainerDied","Data":"f6461bc691570b6e6c4125bf5039f74afcdf0bf920fdf22d4ff4fd1c4972c5cf"} Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.134992 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kmkx9" event={"ID":"c5f043c1-da6b-433d-8db2-47b8d31ab74c","Type":"ContainerStarted","Data":"2b31100575ae9bd08e3f3b1aa7046ae0e8a73742fd5ed47d755379797887ea8e"} Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.137819 4766 generic.go:334] "Generic (PLEG): container finished" podID="609454d8-b199-4188-8659-5f669b795f79" containerID="f54f401b21a510ce6b22e067758548901204144fd8d3982c99657e6cbb4e2a54" exitCode=0 Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.138451 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"609454d8-b199-4188-8659-5f669b795f79","Type":"ContainerDied","Data":"f54f401b21a510ce6b22e067758548901204144fd8d3982c99657e6cbb4e2a54"} Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.321900 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:49 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:49 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:49 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.322267 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.338394 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bjfhq"] Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.372356 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.377809 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-apiserver/apiserver-76f77b778f-cnd4q" Jan 02 00:09:49 crc kubenswrapper[4766]: I0102 00:09:49.500294 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.143757 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" event={"ID":"f869b527-abf2-400f-90c2-ebee595564a2","Type":"ContainerStarted","Data":"6e62faa09ce97dbcdde134b84c22ebebe2d577a7584f679cd59330d15d548e5c"} Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.145245 4766 generic.go:334] "Generic (PLEG): container finished" podID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" containerID="90be7d0344c3e88e26f8c5396641682cfaa2e12b2e08df62012ed90d31247556" exitCode=0 Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.145291 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bjfhq" event={"ID":"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d","Type":"ContainerDied","Data":"90be7d0344c3e88e26f8c5396641682cfaa2e12b2e08df62012ed90d31247556"} Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.145310 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bjfhq" event={"ID":"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d","Type":"ContainerStarted","Data":"fbe8753f9f5150c90c403c79a7ce0b90d25c6a1a7506ad472a43dcbbe4bfc1ee"} Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.149017 4766 generic.go:334] "Generic (PLEG): container finished" podID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" containerID="97d70c20e19bb593800ecbfbed35d2b90096f8dea5af56666467e9bd68537428" exitCode=0 Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.149086 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txcw7" event={"ID":"ed7d2a87-a727-4174-ac4d-746b7d7a29fd","Type":"ContainerDied","Data":"97d70c20e19bb593800ecbfbed35d2b90096f8dea5af56666467e9bd68537428"} Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.155257 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab","Type":"ContainerStarted","Data":"442e7fe4a1db368542747e4ebe2bdf486905e5e438856b3096c5b127dff61ea3"} Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.155291 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab","Type":"ContainerStarted","Data":"f9abdb7bef911d4e6f83a93de4c729ba5a2036229bbe17c1474513e07968e60c"} Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.205822 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.205797304 podStartE2EDuration="2.205797304s" podCreationTimestamp="2026-01-02 00:09:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:50.198688775 +0000 UTC m=+164.997175949" watchObservedRunningTime="2026-01-02 00:09:50.205797304 +0000 UTC m=+165.004284458" Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.321832 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 
02 00:09:50 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:50 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:50 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.322589 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.459632 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-xldzf" Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.589936 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.674427 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/609454d8-b199-4188-8659-5f669b795f79-kube-api-access\") pod \"609454d8-b199-4188-8659-5f669b795f79\" (UID: \"609454d8-b199-4188-8659-5f669b795f79\") " Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.674475 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/609454d8-b199-4188-8659-5f669b795f79-kubelet-dir\") pod \"609454d8-b199-4188-8659-5f669b795f79\" (UID: \"609454d8-b199-4188-8659-5f669b795f79\") " Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.674775 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/609454d8-b199-4188-8659-5f669b795f79-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "609454d8-b199-4188-8659-5f669b795f79" (UID: "609454d8-b199-4188-8659-5f669b795f79"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.709039 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/609454d8-b199-4188-8659-5f669b795f79-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "609454d8-b199-4188-8659-5f669b795f79" (UID: "609454d8-b199-4188-8659-5f669b795f79"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.776365 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/609454d8-b199-4188-8659-5f669b795f79-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:50 crc kubenswrapper[4766]: I0102 00:09:50.776409 4766 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/609454d8-b199-4188-8659-5f669b795f79-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:51 crc kubenswrapper[4766]: I0102 00:09:51.181073 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zd2t8" event={"ID":"f869b527-abf2-400f-90c2-ebee595564a2","Type":"ContainerStarted","Data":"94a548d8c47f4f46400e2ab0619ed1bd01639e952c8932992b613eb447f1764b"} Jan 02 00:09:51 crc kubenswrapper[4766]: I0102 00:09:51.186711 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"609454d8-b199-4188-8659-5f669b795f79","Type":"ContainerDied","Data":"0eac7fc0d34a7345ec147332f222432c51e8c065789a9c178dbfd0ae4eea6d53"} Jan 02 00:09:51 crc kubenswrapper[4766]: I0102 00:09:51.186747 4766 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0eac7fc0d34a7345ec147332f222432c51e8c065789a9c178dbfd0ae4eea6d53" Jan 02 00:09:51 crc kubenswrapper[4766]: I0102 00:09:51.186753 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 02 00:09:51 crc kubenswrapper[4766]: I0102 00:09:51.200482 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-zd2t8" podStartSLOduration=146.200426661 podStartE2EDuration="2m26.200426661s" podCreationTimestamp="2026-01-02 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:09:51.200013631 +0000 UTC m=+165.998500795" watchObservedRunningTime="2026-01-02 00:09:51.200426661 +0000 UTC m=+165.998913815" Jan 02 00:09:51 crc kubenswrapper[4766]: I0102 00:09:51.210077 4766 generic.go:334] "Generic (PLEG): container finished" podID="1f3c7284-9fed-4a33-adf5-ae5b5190c0ab" containerID="442e7fe4a1db368542747e4ebe2bdf486905e5e438856b3096c5b127dff61ea3" exitCode=0 Jan 02 00:09:51 crc kubenswrapper[4766]: I0102 00:09:51.210135 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab","Type":"ContainerDied","Data":"442e7fe4a1db368542747e4ebe2bdf486905e5e438856b3096c5b127dff61ea3"} Jan 02 00:09:51 crc kubenswrapper[4766]: I0102 00:09:51.321540 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:51 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:51 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:51 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:51 crc kubenswrapper[4766]: I0102 00:09:51.321907 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" 
podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:52 crc kubenswrapper[4766]: I0102 00:09:52.319285 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:52 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:52 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:52 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:52 crc kubenswrapper[4766]: I0102 00:09:52.319335 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:52 crc kubenswrapper[4766]: I0102 00:09:52.640707 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 02 00:09:52 crc kubenswrapper[4766]: I0102 00:09:52.816999 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kube-api-access\") pod \"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab\" (UID: \"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab\") " Jan 02 00:09:52 crc kubenswrapper[4766]: I0102 00:09:52.817298 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kubelet-dir\") pod \"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab\" (UID: \"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab\") " Jan 02 00:09:52 crc kubenswrapper[4766]: I0102 00:09:52.817405 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1f3c7284-9fed-4a33-adf5-ae5b5190c0ab" (UID: "1f3c7284-9fed-4a33-adf5-ae5b5190c0ab"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:09:52 crc kubenswrapper[4766]: I0102 00:09:52.817567 4766 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:52 crc kubenswrapper[4766]: I0102 00:09:52.830666 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1f3c7284-9fed-4a33-adf5-ae5b5190c0ab" (UID: "1f3c7284-9fed-4a33-adf5-ae5b5190c0ab"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:09:52 crc kubenswrapper[4766]: I0102 00:09:52.918387 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1f3c7284-9fed-4a33-adf5-ae5b5190c0ab-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 02 00:09:53 crc kubenswrapper[4766]: I0102 00:09:53.282652 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1f3c7284-9fed-4a33-adf5-ae5b5190c0ab","Type":"ContainerDied","Data":"f9abdb7bef911d4e6f83a93de4c729ba5a2036229bbe17c1474513e07968e60c"} Jan 02 00:09:53 crc kubenswrapper[4766]: I0102 00:09:53.282703 4766 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9abdb7bef911d4e6f83a93de4c729ba5a2036229bbe17c1474513e07968e60c" Jan 02 00:09:53 crc kubenswrapper[4766]: I0102 00:09:53.282724 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 02 00:09:53 crc kubenswrapper[4766]: I0102 00:09:53.321498 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:53 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:53 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:53 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:53 crc kubenswrapper[4766]: I0102 00:09:53.321555 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:54 crc kubenswrapper[4766]: I0102 00:09:54.181307 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:09:54 crc kubenswrapper[4766]: I0102 00:09:54.181356 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:09:54 crc kubenswrapper[4766]: I0102 00:09:54.237605 4766 patch_prober.go:28] interesting pod/downloads-7954f5f757-8bc2w container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 02 00:09:54 crc kubenswrapper[4766]: I0102 00:09:54.237626 4766 patch_prober.go:28] interesting pod/downloads-7954f5f757-8bc2w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 02 00:09:54 crc kubenswrapper[4766]: I0102 00:09:54.237673 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-8bc2w" podUID="509add4d-67be-4dff-8194-55516bc7bbd7" containerName="download-server" 
probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 02 00:09:54 crc kubenswrapper[4766]: I0102 00:09:54.237673 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8bc2w" podUID="509add4d-67be-4dff-8194-55516bc7bbd7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 02 00:09:54 crc kubenswrapper[4766]: I0102 00:09:54.315181 4766 patch_prober.go:28] interesting pod/console-f9d7485db-482gx container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Jan 02 00:09:54 crc kubenswrapper[4766]: I0102 00:09:54.315234 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-482gx" podUID="bb03fd80-3959-4cea-9739-3580d6310a64" containerName="console" probeResult="failure" output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" Jan 02 00:09:54 crc kubenswrapper[4766]: I0102 00:09:54.334123 4766 patch_prober.go:28] interesting pod/router-default-5444994796-22rvv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 02 00:09:54 crc kubenswrapper[4766]: [-]has-synced failed: reason withheld Jan 02 00:09:54 crc kubenswrapper[4766]: [+]process-running ok Jan 02 00:09:54 crc kubenswrapper[4766]: healthz check failed Jan 02 00:09:54 crc kubenswrapper[4766]: I0102 00:09:54.334201 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-22rvv" podUID="73d33fe2-474d-4bb3-8e6f-d80ea22650fd" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 02 00:09:55 crc kubenswrapper[4766]: I0102 00:09:55.320133 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:09:55 crc kubenswrapper[4766]: I0102 00:09:55.323703 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-22rvv" Jan 02 00:10:01 crc kubenswrapper[4766]: I0102 00:10:01.043052 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qgsrr"] Jan 02 00:10:01 crc kubenswrapper[4766]: I0102 00:10:01.043635 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" podUID="5ea96e94-99f0-443b-aec1-f500a377a705" containerName="controller-manager" containerID="cri-o://9552f1bf449e24c01e52fe0f2788ea7b73675b134f2610735e1e0b0cdccd9e80" gracePeriod=30 Jan 02 00:10:01 crc kubenswrapper[4766]: I0102 00:10:01.069556 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm"] Jan 02 00:10:01 crc kubenswrapper[4766]: I0102 00:10:01.069764 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" podUID="6e809e90-d3c7-41ea-b346-103b603acc41" containerName="route-controller-manager" 
containerID="cri-o://3f41c165227cd855efa1065a308ca1b866632947a33ae3572462164c9933cf84" gracePeriod=30 Jan 02 00:10:03 crc kubenswrapper[4766]: I0102 00:10:03.376047 4766 generic.go:334] "Generic (PLEG): container finished" podID="6e809e90-d3c7-41ea-b346-103b603acc41" containerID="3f41c165227cd855efa1065a308ca1b866632947a33ae3572462164c9933cf84" exitCode=0 Jan 02 00:10:03 crc kubenswrapper[4766]: I0102 00:10:03.376147 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" event={"ID":"6e809e90-d3c7-41ea-b346-103b603acc41","Type":"ContainerDied","Data":"3f41c165227cd855efa1065a308ca1b866632947a33ae3572462164c9933cf84"} Jan 02 00:10:03 crc kubenswrapper[4766]: I0102 00:10:03.378250 4766 generic.go:334] "Generic (PLEG): container finished" podID="5ea96e94-99f0-443b-aec1-f500a377a705" containerID="9552f1bf449e24c01e52fe0f2788ea7b73675b134f2610735e1e0b0cdccd9e80" exitCode=0 Jan 02 00:10:03 crc kubenswrapper[4766]: I0102 00:10:03.378287 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" event={"ID":"5ea96e94-99f0-443b-aec1-f500a377a705","Type":"ContainerDied","Data":"9552f1bf449e24c01e52fe0f2788ea7b73675b134f2610735e1e0b0cdccd9e80"} Jan 02 00:10:03 crc kubenswrapper[4766]: I0102 00:10:03.899559 4766 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-qgsrr container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.45:8443/healthz\": dial tcp 10.217.0.45:8443: connect: connection refused" start-of-body= Jan 02 00:10:03 crc kubenswrapper[4766]: I0102 00:10:03.899637 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" podUID="5ea96e94-99f0-443b-aec1-f500a377a705" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.45:8443/healthz\": dial tcp 10.217.0.45:8443: connect: connection refused" Jan 02 00:10:04 crc kubenswrapper[4766]: I0102 00:10:04.086335 4766 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-hqfgm container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 02 00:10:04 crc kubenswrapper[4766]: I0102 00:10:04.086415 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" podUID="6e809e90-d3c7-41ea-b346-103b603acc41" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 02 00:10:04 crc kubenswrapper[4766]: I0102 00:10:04.243795 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-8bc2w" Jan 02 00:10:04 crc kubenswrapper[4766]: I0102 00:10:04.305909 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:10:04 crc kubenswrapper[4766]: I0102 00:10:04.313780 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-482gx" Jan 02 00:10:05 crc kubenswrapper[4766]: I0102 00:10:05.180946 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.254508 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.300296 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6"] Jan 02 00:10:09 crc kubenswrapper[4766]: E0102 00:10:09.300767 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e809e90-d3c7-41ea-b346-103b603acc41" containerName="route-controller-manager" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.300781 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e809e90-d3c7-41ea-b346-103b603acc41" containerName="route-controller-manager" Jan 02 00:10:09 crc kubenswrapper[4766]: E0102 00:10:09.300793 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="609454d8-b199-4188-8659-5f669b795f79" containerName="pruner" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.300800 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="609454d8-b199-4188-8659-5f669b795f79" containerName="pruner" Jan 02 00:10:09 crc kubenswrapper[4766]: E0102 00:10:09.300808 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f3c7284-9fed-4a33-adf5-ae5b5190c0ab" containerName="pruner" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.300814 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f3c7284-9fed-4a33-adf5-ae5b5190c0ab" containerName="pruner" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.300935 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e809e90-d3c7-41ea-b346-103b603acc41" containerName="route-controller-manager" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.300965 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="609454d8-b199-4188-8659-5f669b795f79" containerName="pruner" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.300976 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f3c7284-9fed-4a33-adf5-ae5b5190c0ab" containerName="pruner" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.301364 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.303933 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6"] Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.382292 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-client-ca\") pod \"6e809e90-d3c7-41ea-b346-103b603acc41\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.382374 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e809e90-d3c7-41ea-b346-103b603acc41-serving-cert\") pod \"6e809e90-d3c7-41ea-b346-103b603acc41\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.382432 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-config\") pod \"6e809e90-d3c7-41ea-b346-103b603acc41\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.382467 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfl46\" (UniqueName: \"kubernetes.io/projected/6e809e90-d3c7-41ea-b346-103b603acc41-kube-api-access-xfl46\") pod \"6e809e90-d3c7-41ea-b346-103b603acc41\" (UID: \"6e809e90-d3c7-41ea-b346-103b603acc41\") " Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.383189 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-client-ca" (OuterVolumeSpecName: "client-ca") pod "6e809e90-d3c7-41ea-b346-103b603acc41" (UID: "6e809e90-d3c7-41ea-b346-103b603acc41"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.384066 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-config" (OuterVolumeSpecName: "config") pod "6e809e90-d3c7-41ea-b346-103b603acc41" (UID: "6e809e90-d3c7-41ea-b346-103b603acc41"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.392751 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e809e90-d3c7-41ea-b346-103b603acc41-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6e809e90-d3c7-41ea-b346-103b603acc41" (UID: "6e809e90-d3c7-41ea-b346-103b603acc41"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.393084 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e809e90-d3c7-41ea-b346-103b603acc41-kube-api-access-xfl46" (OuterVolumeSpecName: "kube-api-access-xfl46") pod "6e809e90-d3c7-41ea-b346-103b603acc41" (UID: "6e809e90-d3c7-41ea-b346-103b603acc41"). InnerVolumeSpecName "kube-api-access-xfl46". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.408864 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" event={"ID":"6e809e90-d3c7-41ea-b346-103b603acc41","Type":"ContainerDied","Data":"954d9bb56031721f04f901cb748bb91be3905e6f9ab9cc9dc2672869769960cf"} Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.408934 4766 scope.go:117] "RemoveContainer" containerID="3f41c165227cd855efa1065a308ca1b866632947a33ae3572462164c9933cf84" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.408957 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.444935 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm"] Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.451307 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hqfgm"] Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.484494 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/433878df-033f-4cf0-b803-bbd1aa77d865-serving-cert\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.484568 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-config\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.484594 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-client-ca\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.484625 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvpxb\" (UniqueName: \"kubernetes.io/projected/433878df-033f-4cf0-b803-bbd1aa77d865-kube-api-access-tvpxb\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.484976 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.485005 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e809e90-d3c7-41ea-b346-103b603acc41-serving-cert\") on node \"crc\" DevicePath \"\"" 
Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.485045 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e809e90-d3c7-41ea-b346-103b603acc41-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.485063 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfl46\" (UniqueName: \"kubernetes.io/projected/6e809e90-d3c7-41ea-b346-103b603acc41-kube-api-access-xfl46\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.540743 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e809e90-d3c7-41ea-b346-103b603acc41" path="/var/lib/kubelet/pods/6e809e90-d3c7-41ea-b346-103b603acc41/volumes" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.586141 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/433878df-033f-4cf0-b803-bbd1aa77d865-serving-cert\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.586295 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-config\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.586323 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-client-ca\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.586554 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvpxb\" (UniqueName: \"kubernetes.io/projected/433878df-033f-4cf0-b803-bbd1aa77d865-kube-api-access-tvpxb\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.587742 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-client-ca\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.588112 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-config\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.591544 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/433878df-033f-4cf0-b803-bbd1aa77d865-serving-cert\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.608563 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvpxb\" (UniqueName: \"kubernetes.io/projected/433878df-033f-4cf0-b803-bbd1aa77d865-kube-api-access-tvpxb\") pod \"route-controller-manager-868bcf5d76-mn6q6\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:09 crc kubenswrapper[4766]: I0102 00:10:09.622613 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:14 crc kubenswrapper[4766]: I0102 00:10:14.899102 4766 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-qgsrr container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.45:8443/healthz\": dial tcp 10.217.0.45:8443: i/o timeout" start-of-body= Jan 02 00:10:14 crc kubenswrapper[4766]: I0102 00:10:14.899519 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" podUID="5ea96e94-99f0-443b-aec1-f500a377a705" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.45:8443/healthz\": dial tcp 10.217.0.45:8443: i/o timeout" Jan 02 00:10:15 crc kubenswrapper[4766]: I0102 00:10:15.417670 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8g66k" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.113053 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.139264 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7cb94686cb-6trst"] Jan 02 00:10:17 crc kubenswrapper[4766]: E0102 00:10:17.139549 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ea96e94-99f0-443b-aec1-f500a377a705" containerName="controller-manager" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.139565 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ea96e94-99f0-443b-aec1-f500a377a705" containerName="controller-manager" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.139705 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ea96e94-99f0-443b-aec1-f500a377a705" containerName="controller-manager" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.140160 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.147725 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7cb94686cb-6trst"] Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.308484 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-proxy-ca-bundles\") pod \"5ea96e94-99f0-443b-aec1-f500a377a705\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.308567 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xs65l\" (UniqueName: \"kubernetes.io/projected/5ea96e94-99f0-443b-aec1-f500a377a705-kube-api-access-xs65l\") pod \"5ea96e94-99f0-443b-aec1-f500a377a705\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.308597 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ea96e94-99f0-443b-aec1-f500a377a705-serving-cert\") pod \"5ea96e94-99f0-443b-aec1-f500a377a705\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.308673 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-config\") pod \"5ea96e94-99f0-443b-aec1-f500a377a705\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.308754 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-client-ca\") pod \"5ea96e94-99f0-443b-aec1-f500a377a705\" (UID: \"5ea96e94-99f0-443b-aec1-f500a377a705\") " Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.309075 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-client-ca\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.309104 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-config\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.309150 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f116ed80-0992-4868-9e6d-4d85b9b4c100-serving-cert\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.309175 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssspp\" (UniqueName: 
\"kubernetes.io/projected/f116ed80-0992-4868-9e6d-4d85b9b4c100-kube-api-access-ssspp\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.309210 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-proxy-ca-bundles\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.309483 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5ea96e94-99f0-443b-aec1-f500a377a705" (UID: "5ea96e94-99f0-443b-aec1-f500a377a705"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.310412 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-config" (OuterVolumeSpecName: "config") pod "5ea96e94-99f0-443b-aec1-f500a377a705" (UID: "5ea96e94-99f0-443b-aec1-f500a377a705"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.310537 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-client-ca" (OuterVolumeSpecName: "client-ca") pod "5ea96e94-99f0-443b-aec1-f500a377a705" (UID: "5ea96e94-99f0-443b-aec1-f500a377a705"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.313485 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ea96e94-99f0-443b-aec1-f500a377a705-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5ea96e94-99f0-443b-aec1-f500a377a705" (UID: "5ea96e94-99f0-443b-aec1-f500a377a705"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.313963 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ea96e94-99f0-443b-aec1-f500a377a705-kube-api-access-xs65l" (OuterVolumeSpecName: "kube-api-access-xs65l") pod "5ea96e94-99f0-443b-aec1-f500a377a705" (UID: "5ea96e94-99f0-443b-aec1-f500a377a705"). InnerVolumeSpecName "kube-api-access-xs65l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.410097 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-client-ca\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.410182 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-config\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.410266 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f116ed80-0992-4868-9e6d-4d85b9b4c100-serving-cert\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.410305 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssspp\" (UniqueName: \"kubernetes.io/projected/f116ed80-0992-4868-9e6d-4d85b9b4c100-kube-api-access-ssspp\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.410367 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-proxy-ca-bundles\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.410597 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.410623 4766 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.410646 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xs65l\" (UniqueName: \"kubernetes.io/projected/5ea96e94-99f0-443b-aec1-f500a377a705-kube-api-access-xs65l\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.410666 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ea96e94-99f0-443b-aec1-f500a377a705-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.410684 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ea96e94-99f0-443b-aec1-f500a377a705-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.411361 4766 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-client-ca\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.412776 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-config\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.413056 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-proxy-ca-bundles\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.424050 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f116ed80-0992-4868-9e6d-4d85b9b4c100-serving-cert\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.434313 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssspp\" (UniqueName: \"kubernetes.io/projected/f116ed80-0992-4868-9e6d-4d85b9b4c100-kube-api-access-ssspp\") pod \"controller-manager-7cb94686cb-6trst\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.450165 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" event={"ID":"5ea96e94-99f0-443b-aec1-f500a377a705","Type":"ContainerDied","Data":"ca2c4bb45bb7e2b6432d7009a56e169c95c7936bc758d6fa812a02997a6a3e12"} Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.450210 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qgsrr" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.462724 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.478600 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qgsrr"] Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.481164 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qgsrr"] Jan 02 00:10:17 crc kubenswrapper[4766]: I0102 00:10:17.542252 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ea96e94-99f0-443b-aec1-f500a377a705" path="/var/lib/kubelet/pods/5ea96e94-99f0-443b-aec1-f500a377a705/volumes" Jan 02 00:10:21 crc kubenswrapper[4766]: I0102 00:10:21.024663 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cb94686cb-6trst"] Jan 02 00:10:21 crc kubenswrapper[4766]: I0102 00:10:21.126019 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6"] Jan 02 00:10:21 crc kubenswrapper[4766]: I0102 00:10:21.481198 4766 generic.go:334] "Generic (PLEG): container finished" podID="89725c43-8074-47e4-9464-52dac202626c" containerID="229cefade20d52b000c50b31c78c287d3cae7fab036bcda7ad3df2f8697c7b36" exitCode=0 Jan 02 00:10:21 crc kubenswrapper[4766]: I0102 00:10:21.481276 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29455200-8qg7p" event={"ID":"89725c43-8074-47e4-9464-52dac202626c","Type":"ContainerDied","Data":"229cefade20d52b000c50b31c78c287d3cae7fab036bcda7ad3df2f8697c7b36"} Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.181282 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.181656 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.665085 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.666305 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.668979 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.669921 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.673587 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.708871 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"dca794b9-1a84-4fb1-ae06-ea60bf2feed9\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.709006 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"dca794b9-1a84-4fb1-ae06-ea60bf2feed9\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.810574 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"dca794b9-1a84-4fb1-ae06-ea60bf2feed9\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.810675 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"dca794b9-1a84-4fb1-ae06-ea60bf2feed9\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.810713 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"dca794b9-1a84-4fb1-ae06-ea60bf2feed9\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.838991 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"dca794b9-1a84-4fb1-ae06-ea60bf2feed9\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 02 00:10:24 crc kubenswrapper[4766]: I0102 00:10:24.994474 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 02 00:10:27 crc kubenswrapper[4766]: E0102 00:10:27.922283 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 02 00:10:27 crc kubenswrapper[4766]: E0102 00:10:27.923033 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g9j2q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-bjfhq_openshift-marketplace(45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 02 00:10:27 crc kubenswrapper[4766]: E0102 00:10:27.924411 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-bjfhq" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" Jan 02 00:10:29 crc kubenswrapper[4766]: E0102 00:10:29.534794 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-bjfhq" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" Jan 02 00:10:29 crc kubenswrapper[4766]: E0102 00:10:29.603968 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 02 00:10:29 crc kubenswrapper[4766]: E0102 00:10:29.604149 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s948f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-7prh8_openshift-marketplace(2298405f-0595-45a3-9586-7ab0c713531b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 02 00:10:29 crc kubenswrapper[4766]: E0102 00:10:29.606833 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-7prh8" podUID="2298405f-0595-45a3-9586-7ab0c713531b" Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.254408 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.255455 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.259095 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.289040 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kube-api-access\") pod \"installer-9-crc\" (UID: \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.289166 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kubelet-dir\") pod \"installer-9-crc\" (UID: \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.289425 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-var-lock\") pod \"installer-9-crc\" (UID: \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.391226 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-var-lock\") pod \"installer-9-crc\" (UID: \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.391316 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kube-api-access\") pod \"installer-9-crc\" (UID: \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.391355 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kubelet-dir\") pod \"installer-9-crc\" (UID: \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.391381 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-var-lock\") pod \"installer-9-crc\" (UID: \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.391438 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kubelet-dir\") pod \"installer-9-crc\" (UID: \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.411361 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:10:30 crc kubenswrapper[4766]: I0102 00:10:30.575626 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:10:30 crc kubenswrapper[4766]: E0102 00:10:30.929028 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-7prh8" podUID="2298405f-0595-45a3-9586-7ab0c713531b" Jan 02 00:10:30 crc kubenswrapper[4766]: E0102 00:10:30.998000 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 02 00:10:30 crc kubenswrapper[4766]: E0102 00:10:30.998170 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m8ppf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-c457j_openshift-marketplace(0c1f7ccd-9415-4560-968d-f20c5c44df58): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 02 00:10:30 crc kubenswrapper[4766]: E0102 00:10:30.999415 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-c457j" podUID="0c1f7ccd-9415-4560-968d-f20c5c44df58" Jan 02 00:10:31 crc kubenswrapper[4766]: E0102 00:10:31.007733 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 02 00:10:31 crc kubenswrapper[4766]: E0102 00:10:31.007924 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lr74q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-r2cdb_openshift-marketplace(d04a5cef-a04c-4451-9b0f-ba9dea4877f2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 02 00:10:31 crc kubenswrapper[4766]: E0102 00:10:31.010448 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-r2cdb" podUID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.360638 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-r2cdb" podUID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.369186 4766 scope.go:117] "RemoveContainer" containerID="9552f1bf449e24c01e52fe0f2788ea7b73675b134f2610735e1e0b0cdccd9e80" Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.462381 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29455200-8qg7p" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.469212 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.469369 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l8sf6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-kmkx9_openshift-marketplace(c5f043c1-da6b-433d-8db2-47b8d31ab74c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.470635 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-kmkx9" podUID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.493836 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.493993 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ptd2x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-txcw7_openshift-marketplace(ed7d2a87-a727-4174-ac4d-746b7d7a29fd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.495222 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-txcw7" podUID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.518563 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fpfk\" (UniqueName: \"kubernetes.io/projected/89725c43-8074-47e4-9464-52dac202626c-kube-api-access-9fpfk\") pod \"89725c43-8074-47e4-9464-52dac202626c\" (UID: \"89725c43-8074-47e4-9464-52dac202626c\") " Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.518622 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/89725c43-8074-47e4-9464-52dac202626c-serviceca\") pod \"89725c43-8074-47e4-9464-52dac202626c\" (UID: \"89725c43-8074-47e4-9464-52dac202626c\") " Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.519663 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89725c43-8074-47e4-9464-52dac202626c-serviceca" (OuterVolumeSpecName: "serviceca") pod "89725c43-8074-47e4-9464-52dac202626c" (UID: "89725c43-8074-47e4-9464-52dac202626c"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.521369 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.521632 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kvrpt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-4pfq5_openshift-marketplace(52a3f025-cab4-4df2-851c-a3144fc4e9de): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.523292 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-4pfq5" podUID="52a3f025-cab4-4df2-851c-a3144fc4e9de" Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.531154 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89725c43-8074-47e4-9464-52dac202626c-kube-api-access-9fpfk" (OuterVolumeSpecName: "kube-api-access-9fpfk") pod "89725c43-8074-47e4-9464-52dac202626c" (UID: "89725c43-8074-47e4-9464-52dac202626c"). InnerVolumeSpecName "kube-api-access-9fpfk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.552119 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29455200-8qg7p" event={"ID":"89725c43-8074-47e4-9464-52dac202626c","Type":"ContainerDied","Data":"011c5667faf0bdaf367702a20580cc03f1c7b330e2db809e2bbd53304a7449e7"} Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.552180 4766 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="011c5667faf0bdaf367702a20580cc03f1c7b330e2db809e2bbd53304a7449e7" Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.552262 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29455200-8qg7p" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.556750 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.556965 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bdd79,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-b76n6_openshift-marketplace(296488e0-7f00-413d-87fb-d8cecb1b186c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.558466 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-b76n6" podUID="296488e0-7f00-413d-87fb-d8cecb1b186c" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.564143 4766 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-txcw7" podUID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.578151 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-4pfq5" podUID="52a3f025-cab4-4df2-851c-a3144fc4e9de" Jan 02 00:10:32 crc kubenswrapper[4766]: E0102 00:10:32.578417 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-kmkx9" podUID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.620163 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fpfk\" (UniqueName: \"kubernetes.io/projected/89725c43-8074-47e4-9464-52dac202626c-kube-api-access-9fpfk\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.620204 4766 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/89725c43-8074-47e4-9464-52dac202626c-serviceca\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.635171 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cb94686cb-6trst"] Jan 02 00:10:32 crc kubenswrapper[4766]: W0102 00:10:32.641859 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf116ed80_0992_4868_9e6d_4d85b9b4c100.slice/crio-c8734b25cfc772984503d0667b45e06a0e7f6df129146bc89cb53e01416b5a49 WatchSource:0}: Error finding container c8734b25cfc772984503d0667b45e06a0e7f6df129146bc89cb53e01416b5a49: Status 404 returned error can't find the container with id c8734b25cfc772984503d0667b45e06a0e7f6df129146bc89cb53e01416b5a49 Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.807892 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.934697 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 02 00:10:32 crc kubenswrapper[4766]: I0102 00:10:32.955080 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6"] Jan 02 00:10:32 crc kubenswrapper[4766]: W0102 00:10:32.972820 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod433878df_033f_4cf0_b803_bbd1aa77d865.slice/crio-5a03b467f9dcaee3c1c3ace56f0f4464b198b129cec0ef351183e023b3177fa2 WatchSource:0}: Error finding container 5a03b467f9dcaee3c1c3ace56f0f4464b198b129cec0ef351183e023b3177fa2: Status 404 returned error can't find the container with id 5a03b467f9dcaee3c1c3ace56f0f4464b198b129cec0ef351183e023b3177fa2 Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.566824 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0","Type":"ContainerStarted","Data":"0b90c39abd328b9f254cea759281ecb100b3121d844c993720fa490e54601566"} Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.568295 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0","Type":"ContainerStarted","Data":"46fcb6f5aa9f6eed17d34c5bbe2ca817ab0c53327f86c81db75fe8df3113a436"} Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.568757 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" event={"ID":"f116ed80-0992-4868-9e6d-4d85b9b4c100","Type":"ContainerStarted","Data":"fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6"} Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.568782 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" event={"ID":"f116ed80-0992-4868-9e6d-4d85b9b4c100","Type":"ContainerStarted","Data":"c8734b25cfc772984503d0667b45e06a0e7f6df129146bc89cb53e01416b5a49"} Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.568995 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" podUID="f116ed80-0992-4868-9e6d-4d85b9b4c100" containerName="controller-manager" containerID="cri-o://fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6" gracePeriod=30 Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.569184 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.573272 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"dca794b9-1a84-4fb1-ae06-ea60bf2feed9","Type":"ContainerStarted","Data":"0d56b140a720916ef55381885d192df73b7ab7c3780c4bef495be1acced1b43c"} Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.573312 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"dca794b9-1a84-4fb1-ae06-ea60bf2feed9","Type":"ContainerStarted","Data":"72b4f15b1fb4d5ceb68cf69b252da13e3bdf76339c1b57b8630a945261768bb4"} Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.579280 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" podUID="433878df-033f-4cf0-b803-bbd1aa77d865" containerName="route-controller-manager" containerID="cri-o://5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761" gracePeriod=30 Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.579437 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" event={"ID":"433878df-033f-4cf0-b803-bbd1aa77d865","Type":"ContainerStarted","Data":"5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761"} Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.579475 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" event={"ID":"433878df-033f-4cf0-b803-bbd1aa77d865","Type":"ContainerStarted","Data":"5a03b467f9dcaee3c1c3ace56f0f4464b198b129cec0ef351183e023b3177fa2"} 
Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.579505 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.579517 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:33 crc kubenswrapper[4766]: E0102 00:10:33.581917 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-b76n6" podUID="296488e0-7f00-413d-87fb-d8cecb1b186c" Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.587337 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" podStartSLOduration=32.587320397 podStartE2EDuration="32.587320397s" podCreationTimestamp="2026-01-02 00:10:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:10:33.586098824 +0000 UTC m=+208.384585978" watchObservedRunningTime="2026-01-02 00:10:33.587320397 +0000 UTC m=+208.385807551" Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.592824 4766 patch_prober.go:28] interesting pod/route-controller-manager-868bcf5d76-mn6q6 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.56:8443/healthz\": read tcp 10.217.0.2:41084->10.217.0.56:8443: read: connection reset by peer" start-of-body= Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.592887 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" podUID="433878df-033f-4cf0-b803-bbd1aa77d865" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.56:8443/healthz\": read tcp 10.217.0.2:41084->10.217.0.56:8443: read: connection reset by peer" Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.602669 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=9.602650084 podStartE2EDuration="9.602650084s" podCreationTimestamp="2026-01-02 00:10:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:10:33.601120712 +0000 UTC m=+208.399607876" watchObservedRunningTime="2026-01-02 00:10:33.602650084 +0000 UTC m=+208.401137238" Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.655132 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" podStartSLOduration=32.65511328 podStartE2EDuration="32.65511328s" podCreationTimestamp="2026-01-02 00:10:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:10:33.654040951 +0000 UTC m=+208.452528115" watchObservedRunningTime="2026-01-02 00:10:33.65511328 +0000 UTC m=+208.453600434" Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.969338 4766 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-868bcf5d76-mn6q6_433878df-033f-4cf0-b803-bbd1aa77d865/route-controller-manager/0.log" Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.972315 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:33 crc kubenswrapper[4766]: I0102 00:10:33.972047 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.045129 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-client-ca\") pod \"f116ed80-0992-4868-9e6d-4d85b9b4c100\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.045181 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f116ed80-0992-4868-9e6d-4d85b9b4c100-serving-cert\") pod \"f116ed80-0992-4868-9e6d-4d85b9b4c100\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.045203 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/433878df-033f-4cf0-b803-bbd1aa77d865-serving-cert\") pod \"433878df-033f-4cf0-b803-bbd1aa77d865\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.045245 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssspp\" (UniqueName: \"kubernetes.io/projected/f116ed80-0992-4868-9e6d-4d85b9b4c100-kube-api-access-ssspp\") pod \"f116ed80-0992-4868-9e6d-4d85b9b4c100\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.045278 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvpxb\" (UniqueName: \"kubernetes.io/projected/433878df-033f-4cf0-b803-bbd1aa77d865-kube-api-access-tvpxb\") pod \"433878df-033f-4cf0-b803-bbd1aa77d865\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.045306 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-config\") pod \"433878df-033f-4cf0-b803-bbd1aa77d865\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.045342 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-client-ca\") pod \"433878df-033f-4cf0-b803-bbd1aa77d865\" (UID: \"433878df-033f-4cf0-b803-bbd1aa77d865\") " Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.045402 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-config\") pod \"f116ed80-0992-4868-9e6d-4d85b9b4c100\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.046038 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-client-ca" (OuterVolumeSpecName: "client-ca") pod "f116ed80-0992-4868-9e6d-4d85b9b4c100" (UID: "f116ed80-0992-4868-9e6d-4d85b9b4c100"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.046100 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-config" (OuterVolumeSpecName: "config") pod "433878df-033f-4cf0-b803-bbd1aa77d865" (UID: "433878df-033f-4cf0-b803-bbd1aa77d865"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.046127 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-config" (OuterVolumeSpecName: "config") pod "f116ed80-0992-4868-9e6d-4d85b9b4c100" (UID: "f116ed80-0992-4868-9e6d-4d85b9b4c100"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.046185 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-client-ca" (OuterVolumeSpecName: "client-ca") pod "433878df-033f-4cf0-b803-bbd1aa77d865" (UID: "433878df-033f-4cf0-b803-bbd1aa77d865"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.046287 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-proxy-ca-bundles\") pod \"f116ed80-0992-4868-9e6d-4d85b9b4c100\" (UID: \"f116ed80-0992-4868-9e6d-4d85b9b4c100\") " Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.046782 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.046808 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.046819 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/433878df-033f-4cf0-b803-bbd1aa77d865-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.046827 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.046802 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "f116ed80-0992-4868-9e6d-4d85b9b4c100" (UID: "f116ed80-0992-4868-9e6d-4d85b9b4c100"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.050442 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f116ed80-0992-4868-9e6d-4d85b9b4c100-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f116ed80-0992-4868-9e6d-4d85b9b4c100" (UID: "f116ed80-0992-4868-9e6d-4d85b9b4c100"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.050543 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/433878df-033f-4cf0-b803-bbd1aa77d865-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "433878df-033f-4cf0-b803-bbd1aa77d865" (UID: "433878df-033f-4cf0-b803-bbd1aa77d865"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.051113 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f116ed80-0992-4868-9e6d-4d85b9b4c100-kube-api-access-ssspp" (OuterVolumeSpecName: "kube-api-access-ssspp") pod "f116ed80-0992-4868-9e6d-4d85b9b4c100" (UID: "f116ed80-0992-4868-9e6d-4d85b9b4c100"). InnerVolumeSpecName "kube-api-access-ssspp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.051151 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/433878df-033f-4cf0-b803-bbd1aa77d865-kube-api-access-tvpxb" (OuterVolumeSpecName: "kube-api-access-tvpxb") pod "433878df-033f-4cf0-b803-bbd1aa77d865" (UID: "433878df-033f-4cf0-b803-bbd1aa77d865"). InnerVolumeSpecName "kube-api-access-tvpxb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.148483 4766 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f116ed80-0992-4868-9e6d-4d85b9b4c100-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.148753 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f116ed80-0992-4868-9e6d-4d85b9b4c100-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.148767 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/433878df-033f-4cf0-b803-bbd1aa77d865-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.148779 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssspp\" (UniqueName: \"kubernetes.io/projected/f116ed80-0992-4868-9e6d-4d85b9b4c100-kube-api-access-ssspp\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.148794 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvpxb\" (UniqueName: \"kubernetes.io/projected/433878df-033f-4cf0-b803-bbd1aa77d865-kube-api-access-tvpxb\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.585212 4766 generic.go:334] "Generic (PLEG): container finished" podID="f116ed80-0992-4868-9e6d-4d85b9b4c100" containerID="fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6" exitCode=0 Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.585472 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" event={"ID":"f116ed80-0992-4868-9e6d-4d85b9b4c100","Type":"ContainerDied","Data":"fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6"} Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.585503 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" event={"ID":"f116ed80-0992-4868-9e6d-4d85b9b4c100","Type":"ContainerDied","Data":"c8734b25cfc772984503d0667b45e06a0e7f6df129146bc89cb53e01416b5a49"} Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.585522 4766 scope.go:117] "RemoveContainer" containerID="fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.585617 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cb94686cb-6trst" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.589147 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-868bcf5d76-mn6q6_433878df-033f-4cf0-b803-bbd1aa77d865/route-controller-manager/0.log" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.589189 4766 generic.go:334] "Generic (PLEG): container finished" podID="433878df-033f-4cf0-b803-bbd1aa77d865" containerID="5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761" exitCode=255 Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.589323 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.589409 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" event={"ID":"433878df-033f-4cf0-b803-bbd1aa77d865","Type":"ContainerDied","Data":"5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761"} Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.589524 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6" event={"ID":"433878df-033f-4cf0-b803-bbd1aa77d865","Type":"ContainerDied","Data":"5a03b467f9dcaee3c1c3ace56f0f4464b198b129cec0ef351183e023b3177fa2"} Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.592953 4766 generic.go:334] "Generic (PLEG): container finished" podID="dca794b9-1a84-4fb1-ae06-ea60bf2feed9" containerID="0d56b140a720916ef55381885d192df73b7ab7c3780c4bef495be1acced1b43c" exitCode=0 Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.593367 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"dca794b9-1a84-4fb1-ae06-ea60bf2feed9","Type":"ContainerDied","Data":"0d56b140a720916ef55381885d192df73b7ab7c3780c4bef495be1acced1b43c"} Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.606589 4766 scope.go:117] "RemoveContainer" containerID="fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6" Jan 02 00:10:34 crc kubenswrapper[4766]: E0102 00:10:34.608648 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6\": container with ID starting with fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6 not found: ID does not exist" containerID="fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.608712 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6"} err="failed to get container status \"fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6\": rpc error: code = NotFound desc = could not find container \"fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6\": container with ID starting with fcbea22137102d987449e42568061b58d0d156c5f11adefeef51d2d5ab2626e6 not found: ID does not exist" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.608739 4766 scope.go:117] "RemoveContainer" containerID="5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.609100 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=4.609086704 podStartE2EDuration="4.609086704s" podCreationTimestamp="2026-01-02 00:10:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:10:34.608722794 +0000 UTC m=+209.407209968" watchObservedRunningTime="2026-01-02 00:10:34.609086704 +0000 UTC m=+209.407573858" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.631960 4766 scope.go:117] "RemoveContainer" containerID="5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761" Jan 
02 00:10:34 crc kubenswrapper[4766]: E0102 00:10:34.632501 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761\": container with ID starting with 5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761 not found: ID does not exist" containerID="5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.632537 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761"} err="failed to get container status \"5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761\": rpc error: code = NotFound desc = could not find container \"5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761\": container with ID starting with 5c40b9bfdc50580f76cfa1975dca0e751f562d7d7abcde504bd3d86d485ff761 not found: ID does not exist" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.633624 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cb94686cb-6trst"] Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.636757 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7cb94686cb-6trst"] Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.644199 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6"] Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.646949 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-868bcf5d76-mn6q6"] Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.668407 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6"] Jan 02 00:10:34 crc kubenswrapper[4766]: E0102 00:10:34.671546 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="433878df-033f-4cf0-b803-bbd1aa77d865" containerName="route-controller-manager" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.671578 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="433878df-033f-4cf0-b803-bbd1aa77d865" containerName="route-controller-manager" Jan 02 00:10:34 crc kubenswrapper[4766]: E0102 00:10:34.671601 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89725c43-8074-47e4-9464-52dac202626c" containerName="image-pruner" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.671608 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="89725c43-8074-47e4-9464-52dac202626c" containerName="image-pruner" Jan 02 00:10:34 crc kubenswrapper[4766]: E0102 00:10:34.671626 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f116ed80-0992-4868-9e6d-4d85b9b4c100" containerName="controller-manager" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.671633 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="f116ed80-0992-4868-9e6d-4d85b9b4c100" containerName="controller-manager" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.671867 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="f116ed80-0992-4868-9e6d-4d85b9b4c100" containerName="controller-manager" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.671885 4766 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="89725c43-8074-47e4-9464-52dac202626c" containerName="image-pruner" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.671901 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="433878df-033f-4cf0-b803-bbd1aa77d865" containerName="route-controller-manager" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.672483 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.674427 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7bdbd88689-kjxjr"] Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.676022 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.681404 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.681415 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.681445 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.682174 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.682349 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.682681 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.682970 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.683298 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.684658 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.685010 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.685167 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.686643 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.692235 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.692321 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-controller-manager/controller-manager-7bdbd88689-kjxjr"] Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.699640 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6"] Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.756305 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-client-ca\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.756597 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba296a10-72d7-4719-9fcf-31bc93249ee2-serving-cert\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.756735 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-config\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.756888 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-proxy-ca-bundles\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.757005 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-config\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.757114 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-client-ca\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.757214 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb7km\" (UniqueName: \"kubernetes.io/projected/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-kube-api-access-rb7km\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.757318 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-serving-cert\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.757456 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxzzh\" (UniqueName: \"kubernetes.io/projected/ba296a10-72d7-4719-9fcf-31bc93249ee2-kube-api-access-nxzzh\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.859023 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-client-ca\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.859322 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba296a10-72d7-4719-9fcf-31bc93249ee2-serving-cert\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.859459 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-config\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.859564 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-proxy-ca-bundles\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.859666 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-config\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.859782 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-client-ca\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.859920 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb7km\" (UniqueName: \"kubernetes.io/projected/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-kube-api-access-rb7km\") pod 
\"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.860039 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-serving-cert\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.860164 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxzzh\" (UniqueName: \"kubernetes.io/projected/ba296a10-72d7-4719-9fcf-31bc93249ee2-kube-api-access-nxzzh\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.861170 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-client-ca\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.860043 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-client-ca\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.861502 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-config\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.862476 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-proxy-ca-bundles\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.862661 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-config\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.865902 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-serving-cert\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 
00:10:34.874863 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba296a10-72d7-4719-9fcf-31bc93249ee2-serving-cert\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.876302 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb7km\" (UniqueName: \"kubernetes.io/projected/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-kube-api-access-rb7km\") pod \"controller-manager-7bdbd88689-kjxjr\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.876754 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxzzh\" (UniqueName: \"kubernetes.io/projected/ba296a10-72d7-4719-9fcf-31bc93249ee2-kube-api-access-nxzzh\") pod \"route-controller-manager-795fdd6df6-tmcd6\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:34 crc kubenswrapper[4766]: I0102 00:10:34.995570 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.008157 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.234025 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bdbd88689-kjxjr"] Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.268406 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6"] Jan 02 00:10:35 crc kubenswrapper[4766]: W0102 00:10:35.273590 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba296a10_72d7_4719_9fcf_31bc93249ee2.slice/crio-3dc8c65325f2c25c8bfc630f206d3e026c2e1d10b08740deee03ad9b7f92c3b2 WatchSource:0}: Error finding container 3dc8c65325f2c25c8bfc630f206d3e026c2e1d10b08740deee03ad9b7f92c3b2: Status 404 returned error can't find the container with id 3dc8c65325f2c25c8bfc630f206d3e026c2e1d10b08740deee03ad9b7f92c3b2 Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.540977 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="433878df-033f-4cf0-b803-bbd1aa77d865" path="/var/lib/kubelet/pods/433878df-033f-4cf0-b803-bbd1aa77d865/volumes" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.541935 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f116ed80-0992-4868-9e6d-4d85b9b4c100" path="/var/lib/kubelet/pods/f116ed80-0992-4868-9e6d-4d85b9b4c100/volumes" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.599236 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" event={"ID":"ba296a10-72d7-4719-9fcf-31bc93249ee2","Type":"ContainerStarted","Data":"dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae"} Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.599296 4766 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.599307 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" event={"ID":"ba296a10-72d7-4719-9fcf-31bc93249ee2","Type":"ContainerStarted","Data":"3dc8c65325f2c25c8bfc630f206d3e026c2e1d10b08740deee03ad9b7f92c3b2"} Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.602983 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" event={"ID":"62c6bad5-b417-4f5c-aa08-737a3b1e8b57","Type":"ContainerStarted","Data":"f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b"} Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.603022 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" event={"ID":"62c6bad5-b417-4f5c-aa08-737a3b1e8b57","Type":"ContainerStarted","Data":"33ff14dcb481ebbc4a08f6490a54b8d2f0d03ba5b7ea61a7f5777d60420effc9"} Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.603740 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.611889 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.654175 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" podStartSLOduration=14.654158466 podStartE2EDuration="14.654158466s" podCreationTimestamp="2026-01-02 00:10:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:10:35.625008983 +0000 UTC m=+210.423496147" watchObservedRunningTime="2026-01-02 00:10:35.654158466 +0000 UTC m=+210.452645620" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.654891 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" podStartSLOduration=14.654885005 podStartE2EDuration="14.654885005s" podCreationTimestamp="2026-01-02 00:10:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:10:35.650022393 +0000 UTC m=+210.448509547" watchObservedRunningTime="2026-01-02 00:10:35.654885005 +0000 UTC m=+210.453372159" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.684717 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.845772 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.985921 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kubelet-dir\") pod \"dca794b9-1a84-4fb1-ae06-ea60bf2feed9\" (UID: \"dca794b9-1a84-4fb1-ae06-ea60bf2feed9\") " Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.986017 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "dca794b9-1a84-4fb1-ae06-ea60bf2feed9" (UID: "dca794b9-1a84-4fb1-ae06-ea60bf2feed9"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.986210 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kube-api-access\") pod \"dca794b9-1a84-4fb1-ae06-ea60bf2feed9\" (UID: \"dca794b9-1a84-4fb1-ae06-ea60bf2feed9\") " Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.986661 4766 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:35 crc kubenswrapper[4766]: I0102 00:10:35.992161 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "dca794b9-1a84-4fb1-ae06-ea60bf2feed9" (UID: "dca794b9-1a84-4fb1-ae06-ea60bf2feed9"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:36 crc kubenswrapper[4766]: I0102 00:10:36.087397 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dca794b9-1a84-4fb1-ae06-ea60bf2feed9-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:36 crc kubenswrapper[4766]: I0102 00:10:36.613142 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"dca794b9-1a84-4fb1-ae06-ea60bf2feed9","Type":"ContainerDied","Data":"72b4f15b1fb4d5ceb68cf69b252da13e3bdf76339c1b57b8630a945261768bb4"} Jan 02 00:10:36 crc kubenswrapper[4766]: I0102 00:10:36.613187 4766 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72b4f15b1fb4d5ceb68cf69b252da13e3bdf76339c1b57b8630a945261768bb4" Jan 02 00:10:36 crc kubenswrapper[4766]: I0102 00:10:36.613189 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 02 00:10:45 crc kubenswrapper[4766]: I0102 00:10:45.669142 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c457j" event={"ID":"0c1f7ccd-9415-4560-968d-f20c5c44df58","Type":"ContainerStarted","Data":"e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450"} Jan 02 00:10:46 crc kubenswrapper[4766]: I0102 00:10:46.680380 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2cdb" event={"ID":"d04a5cef-a04c-4451-9b0f-ba9dea4877f2","Type":"ContainerStarted","Data":"c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e"} Jan 02 00:10:46 crc kubenswrapper[4766]: I0102 00:10:46.682989 4766 generic.go:334] "Generic (PLEG): container finished" podID="2298405f-0595-45a3-9586-7ab0c713531b" containerID="0feb848ebf5d3459059a696026c637a61861bf53ba5ebad3a916148f775cf550" exitCode=0 Jan 02 00:10:46 crc kubenswrapper[4766]: I0102 00:10:46.683052 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7prh8" event={"ID":"2298405f-0595-45a3-9586-7ab0c713531b","Type":"ContainerDied","Data":"0feb848ebf5d3459059a696026c637a61861bf53ba5ebad3a916148f775cf550"} Jan 02 00:10:46 crc kubenswrapper[4766]: I0102 00:10:46.686120 4766 generic.go:334] "Generic (PLEG): container finished" podID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" containerID="8ed8c74dd9458cf7ce8d644fe3a34ce1ad9c53b83c05bd8c742074016f139fef" exitCode=0 Jan 02 00:10:46 crc kubenswrapper[4766]: I0102 00:10:46.686206 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bjfhq" event={"ID":"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d","Type":"ContainerDied","Data":"8ed8c74dd9458cf7ce8d644fe3a34ce1ad9c53b83c05bd8c742074016f139fef"} Jan 02 00:10:46 crc kubenswrapper[4766]: I0102 00:10:46.688911 4766 generic.go:334] "Generic (PLEG): container finished" podID="0c1f7ccd-9415-4560-968d-f20c5c44df58" containerID="e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450" exitCode=0 Jan 02 00:10:46 crc kubenswrapper[4766]: I0102 00:10:46.688948 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c457j" event={"ID":"0c1f7ccd-9415-4560-968d-f20c5c44df58","Type":"ContainerDied","Data":"e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450"} Jan 02 00:10:47 crc kubenswrapper[4766]: I0102 00:10:47.712209 4766 generic.go:334] "Generic (PLEG): container finished" podID="296488e0-7f00-413d-87fb-d8cecb1b186c" containerID="a9e81a11e314bb74d009a669061d0d36e8dc052991155cab265eb8bda6ca6f25" exitCode=0 Jan 02 00:10:47 crc kubenswrapper[4766]: I0102 00:10:47.712645 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b76n6" event={"ID":"296488e0-7f00-413d-87fb-d8cecb1b186c","Type":"ContainerDied","Data":"a9e81a11e314bb74d009a669061d0d36e8dc052991155cab265eb8bda6ca6f25"} Jan 02 00:10:47 crc kubenswrapper[4766]: I0102 00:10:47.721386 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7prh8" event={"ID":"2298405f-0595-45a3-9586-7ab0c713531b","Type":"ContainerStarted","Data":"e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95"} Jan 02 00:10:47 crc kubenswrapper[4766]: I0102 00:10:47.727776 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bjfhq" 
event={"ID":"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d","Type":"ContainerStarted","Data":"186f54861fecf54c118749b7ce1225dbcee76df84492e37e26e8822c341e2a09"} Jan 02 00:10:47 crc kubenswrapper[4766]: I0102 00:10:47.730443 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c457j" event={"ID":"0c1f7ccd-9415-4560-968d-f20c5c44df58","Type":"ContainerStarted","Data":"e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0"} Jan 02 00:10:47 crc kubenswrapper[4766]: I0102 00:10:47.732531 4766 generic.go:334] "Generic (PLEG): container finished" podID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" containerID="c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e" exitCode=0 Jan 02 00:10:47 crc kubenswrapper[4766]: I0102 00:10:47.732561 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2cdb" event={"ID":"d04a5cef-a04c-4451-9b0f-ba9dea4877f2","Type":"ContainerDied","Data":"c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e"} Jan 02 00:10:47 crc kubenswrapper[4766]: I0102 00:10:47.792078 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c457j" podStartSLOduration=3.694287429 podStartE2EDuration="1m2.792058132s" podCreationTimestamp="2026-01-02 00:09:45 +0000 UTC" firstStartedPulling="2026-01-02 00:09:48.101604631 +0000 UTC m=+162.900091785" lastFinishedPulling="2026-01-02 00:10:47.199375344 +0000 UTC m=+221.997862488" observedRunningTime="2026-01-02 00:10:47.7894113 +0000 UTC m=+222.587898474" watchObservedRunningTime="2026-01-02 00:10:47.792058132 +0000 UTC m=+222.590545276" Jan 02 00:10:47 crc kubenswrapper[4766]: I0102 00:10:47.813907 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bjfhq" podStartSLOduration=2.650026157 podStartE2EDuration="59.813888035s" podCreationTimestamp="2026-01-02 00:09:48 +0000 UTC" firstStartedPulling="2026-01-02 00:09:50.146601847 +0000 UTC m=+164.945089001" lastFinishedPulling="2026-01-02 00:10:47.310463725 +0000 UTC m=+222.108950879" observedRunningTime="2026-01-02 00:10:47.812126497 +0000 UTC m=+222.610613661" watchObservedRunningTime="2026-01-02 00:10:47.813888035 +0000 UTC m=+222.612375179" Jan 02 00:10:47 crc kubenswrapper[4766]: I0102 00:10:47.839663 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7prh8" podStartSLOduration=3.794344529 podStartE2EDuration="1m2.839647106s" podCreationTimestamp="2026-01-02 00:09:45 +0000 UTC" firstStartedPulling="2026-01-02 00:09:48.088005919 +0000 UTC m=+162.886493073" lastFinishedPulling="2026-01-02 00:10:47.133308496 +0000 UTC m=+221.931795650" observedRunningTime="2026-01-02 00:10:47.836500551 +0000 UTC m=+222.634987705" watchObservedRunningTime="2026-01-02 00:10:47.839647106 +0000 UTC m=+222.638134260" Jan 02 00:10:48 crc kubenswrapper[4766]: I0102 00:10:48.740251 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2cdb" event={"ID":"d04a5cef-a04c-4451-9b0f-ba9dea4877f2","Type":"ContainerStarted","Data":"97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44"} Jan 02 00:10:48 crc kubenswrapper[4766]: I0102 00:10:48.744081 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b76n6" 
event={"ID":"296488e0-7f00-413d-87fb-d8cecb1b186c","Type":"ContainerStarted","Data":"273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f"} Jan 02 00:10:48 crc kubenswrapper[4766]: I0102 00:10:48.746154 4766 generic.go:334] "Generic (PLEG): container finished" podID="52a3f025-cab4-4df2-851c-a3144fc4e9de" containerID="3d05f34110e01546e2b50029c653ac7c19ef33f3d727899f8ed01222d5da5807" exitCode=0 Jan 02 00:10:48 crc kubenswrapper[4766]: I0102 00:10:48.746271 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pfq5" event={"ID":"52a3f025-cab4-4df2-851c-a3144fc4e9de","Type":"ContainerDied","Data":"3d05f34110e01546e2b50029c653ac7c19ef33f3d727899f8ed01222d5da5807"} Jan 02 00:10:48 crc kubenswrapper[4766]: I0102 00:10:48.748416 4766 generic.go:334] "Generic (PLEG): container finished" podID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" containerID="80c05e285dd1bec5b9c4b9820db74a7eb11353fac6d654b24a8ec7780e865352" exitCode=0 Jan 02 00:10:48 crc kubenswrapper[4766]: I0102 00:10:48.748455 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txcw7" event={"ID":"ed7d2a87-a727-4174-ac4d-746b7d7a29fd","Type":"ContainerDied","Data":"80c05e285dd1bec5b9c4b9820db74a7eb11353fac6d654b24a8ec7780e865352"} Jan 02 00:10:48 crc kubenswrapper[4766]: I0102 00:10:48.770933 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-r2cdb" podStartSLOduration=3.74512006 podStartE2EDuration="1m3.770904002s" podCreationTimestamp="2026-01-02 00:09:45 +0000 UTC" firstStartedPulling="2026-01-02 00:09:48.10197527 +0000 UTC m=+162.900462424" lastFinishedPulling="2026-01-02 00:10:48.127759212 +0000 UTC m=+222.926246366" observedRunningTime="2026-01-02 00:10:48.77045846 +0000 UTC m=+223.568945604" watchObservedRunningTime="2026-01-02 00:10:48.770904002 +0000 UTC m=+223.569391166" Jan 02 00:10:48 crc kubenswrapper[4766]: I0102 00:10:48.848792 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b76n6" podStartSLOduration=2.798447688 podStartE2EDuration="1m1.84875829s" podCreationTimestamp="2026-01-02 00:09:47 +0000 UTC" firstStartedPulling="2026-01-02 00:09:49.240200475 +0000 UTC m=+164.038687629" lastFinishedPulling="2026-01-02 00:10:48.290511077 +0000 UTC m=+223.088998231" observedRunningTime="2026-01-02 00:10:48.84767755 +0000 UTC m=+223.646164714" watchObservedRunningTime="2026-01-02 00:10:48.84875829 +0000 UTC m=+223.647245444" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.022747 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.022829 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.444006 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-85btb"] Jan 02 00:10:49 crc kubenswrapper[4766]: E0102 00:10:49.444611 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dca794b9-1a84-4fb1-ae06-ea60bf2feed9" containerName="pruner" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.444628 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="dca794b9-1a84-4fb1-ae06-ea60bf2feed9" containerName="pruner" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 
00:10:49.444784 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="dca794b9-1a84-4fb1-ae06-ea60bf2feed9" containerName="pruner" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.445292 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.462434 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-85btb"] Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.594943 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb356a5a-58ab-4002-9628-966d947f6628-bound-sa-token\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.595029 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eb356a5a-58ab-4002-9628-966d947f6628-registry-certificates\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.595060 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eb356a5a-58ab-4002-9628-966d947f6628-registry-tls\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.595078 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eb356a5a-58ab-4002-9628-966d947f6628-installation-pull-secrets\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.595099 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eb356a5a-58ab-4002-9628-966d947f6628-trusted-ca\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.595140 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.595177 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eb356a5a-58ab-4002-9628-966d947f6628-ca-trust-extracted\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.595193 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6twj6\" (UniqueName: \"kubernetes.io/projected/eb356a5a-58ab-4002-9628-966d947f6628-kube-api-access-6twj6\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.646263 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.696672 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eb356a5a-58ab-4002-9628-966d947f6628-ca-trust-extracted\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.696737 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6twj6\" (UniqueName: \"kubernetes.io/projected/eb356a5a-58ab-4002-9628-966d947f6628-kube-api-access-6twj6\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.696772 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb356a5a-58ab-4002-9628-966d947f6628-bound-sa-token\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.696820 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eb356a5a-58ab-4002-9628-966d947f6628-registry-certificates\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.696865 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eb356a5a-58ab-4002-9628-966d947f6628-registry-tls\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.696884 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eb356a5a-58ab-4002-9628-966d947f6628-installation-pull-secrets\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.696905 4766 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eb356a5a-58ab-4002-9628-966d947f6628-trusted-ca\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.699103 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eb356a5a-58ab-4002-9628-966d947f6628-trusted-ca\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.699426 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eb356a5a-58ab-4002-9628-966d947f6628-registry-certificates\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.699804 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eb356a5a-58ab-4002-9628-966d947f6628-ca-trust-extracted\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.703005 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eb356a5a-58ab-4002-9628-966d947f6628-installation-pull-secrets\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.703620 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eb356a5a-58ab-4002-9628-966d947f6628-registry-tls\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.718374 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6twj6\" (UniqueName: \"kubernetes.io/projected/eb356a5a-58ab-4002-9628-966d947f6628-kube-api-access-6twj6\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.734444 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb356a5a-58ab-4002-9628-966d947f6628-bound-sa-token\") pod \"image-registry-66df7c8f76-85btb\" (UID: \"eb356a5a-58ab-4002-9628-966d947f6628\") " pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.763096 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:49 crc kubenswrapper[4766]: I0102 00:10:49.877909 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kmkx9" event={"ID":"c5f043c1-da6b-433d-8db2-47b8d31ab74c","Type":"ContainerStarted","Data":"9bbd2e830a5218dec2c9bc2958ca9d2b69c51eb81c680c26a591a2961a0c3aad"} Jan 02 00:10:50 crc kubenswrapper[4766]: I0102 00:10:50.201478 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bjfhq" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" containerName="registry-server" probeResult="failure" output=< Jan 02 00:10:50 crc kubenswrapper[4766]: timeout: failed to connect service ":50051" within 1s Jan 02 00:10:50 crc kubenswrapper[4766]: > Jan 02 00:10:50 crc kubenswrapper[4766]: I0102 00:10:50.378230 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-85btb"] Jan 02 00:10:50 crc kubenswrapper[4766]: W0102 00:10:50.384013 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb356a5a_58ab_4002_9628_966d947f6628.slice/crio-b03a1e0b66d7629a8d6b89f757f5d367ca4d81ded1fc46549aba36767b0f16f4 WatchSource:0}: Error finding container b03a1e0b66d7629a8d6b89f757f5d367ca4d81ded1fc46549aba36767b0f16f4: Status 404 returned error can't find the container with id b03a1e0b66d7629a8d6b89f757f5d367ca4d81ded1fc46549aba36767b0f16f4 Jan 02 00:10:50 crc kubenswrapper[4766]: I0102 00:10:50.883733 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-85btb" event={"ID":"eb356a5a-58ab-4002-9628-966d947f6628","Type":"ContainerStarted","Data":"aac93ac0bc7c3599469da3748ffe84c01e0ef8cd2886f3b6c536e254f8aba861"} Jan 02 00:10:50 crc kubenswrapper[4766]: I0102 00:10:50.884104 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-85btb" event={"ID":"eb356a5a-58ab-4002-9628-966d947f6628","Type":"ContainerStarted","Data":"b03a1e0b66d7629a8d6b89f757f5d367ca4d81ded1fc46549aba36767b0f16f4"} Jan 02 00:10:50 crc kubenswrapper[4766]: I0102 00:10:50.884243 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:10:50 crc kubenswrapper[4766]: I0102 00:10:50.886089 4766 generic.go:334] "Generic (PLEG): container finished" podID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" containerID="9bbd2e830a5218dec2c9bc2958ca9d2b69c51eb81c680c26a591a2961a0c3aad" exitCode=0 Jan 02 00:10:50 crc kubenswrapper[4766]: I0102 00:10:50.886120 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kmkx9" event={"ID":"c5f043c1-da6b-433d-8db2-47b8d31ab74c","Type":"ContainerDied","Data":"9bbd2e830a5218dec2c9bc2958ca9d2b69c51eb81c680c26a591a2961a0c3aad"} Jan 02 00:10:50 crc kubenswrapper[4766]: I0102 00:10:50.912268 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-85btb" podStartSLOduration=1.9122399159999999 podStartE2EDuration="1.912239916s" podCreationTimestamp="2026-01-02 00:10:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:10:50.909470361 +0000 UTC m=+225.707957515" watchObservedRunningTime="2026-01-02 00:10:50.912239916 
+0000 UTC m=+225.710727070" Jan 02 00:10:54 crc kubenswrapper[4766]: I0102 00:10:54.180602 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:10:54 crc kubenswrapper[4766]: I0102 00:10:54.181249 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:10:54 crc kubenswrapper[4766]: I0102 00:10:54.181304 4766 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:10:54 crc kubenswrapper[4766]: I0102 00:10:54.182124 4766 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be"} pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 02 00:10:54 crc kubenswrapper[4766]: I0102 00:10:54.182205 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" containerID="cri-o://b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be" gracePeriod=600 Jan 02 00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.606981 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c457j" Jan 02 00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.608205 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c457j" Jan 02 00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.685734 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c457j" Jan 02 00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.827085 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.827448 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.874104 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.920261 4766 generic.go:334] "Generic (PLEG): container finished" podID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerID="b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be" exitCode=0 Jan 02 00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.921215 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerDied","Data":"b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be"} Jan 02 
00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.958296 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c457j" Jan 02 00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.962474 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.987175 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:10:55 crc kubenswrapper[4766]: I0102 00:10:55.987210 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.031246 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.730274 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4pfq5"] Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.738859 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7prh8"] Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.748477 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c457j"] Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.758106 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r2cdb"] Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.772665 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qlzhc"] Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.773105 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" podUID="c27e1f6d-5925-4ca0-89f2-f447afbf845f" containerName="marketplace-operator" containerID="cri-o://d2330ceeb23365890361dd88cc88eda1b72fe700da0c86ecbb54523ef42aef76" gracePeriod=30 Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.781046 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b76n6"] Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.781340 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b76n6" podUID="296488e0-7f00-413d-87fb-d8cecb1b186c" containerName="registry-server" containerID="cri-o://273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f" gracePeriod=30 Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.787655 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kmkx9"] Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.792892 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bjfhq"] Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.793179 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bjfhq" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" containerName="registry-server" containerID="cri-o://186f54861fecf54c118749b7ce1225dbcee76df84492e37e26e8822c341e2a09" gracePeriod=30 Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.795221 4766 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g8fpt"] Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.795871 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.813354 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-txcw7"] Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.815772 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g8fpt"] Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.891460 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59x7b\" (UniqueName: \"kubernetes.io/projected/82bbd02f-f842-47cf-bea4-2dcd3e78a02e-kube-api-access-59x7b\") pod \"marketplace-operator-79b997595-g8fpt\" (UID: \"82bbd02f-f842-47cf-bea4-2dcd3e78a02e\") " pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.891540 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82bbd02f-f842-47cf-bea4-2dcd3e78a02e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g8fpt\" (UID: \"82bbd02f-f842-47cf-bea4-2dcd3e78a02e\") " pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.891605 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82bbd02f-f842-47cf-bea4-2dcd3e78a02e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g8fpt\" (UID: \"82bbd02f-f842-47cf-bea4-2dcd3e78a02e\") " pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.927764 4766 generic.go:334] "Generic (PLEG): container finished" podID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" containerID="186f54861fecf54c118749b7ce1225dbcee76df84492e37e26e8822c341e2a09" exitCode=0 Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.927836 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bjfhq" event={"ID":"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d","Type":"ContainerDied","Data":"186f54861fecf54c118749b7ce1225dbcee76df84492e37e26e8822c341e2a09"} Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.932909 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txcw7" event={"ID":"ed7d2a87-a727-4174-ac4d-746b7d7a29fd","Type":"ContainerStarted","Data":"4bba10727e36c8cbeb45ba5eb6d336d783e8bb0579cee8a3ef1b03ca83c13dab"} Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.933061 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-txcw7" podUID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" containerName="registry-server" containerID="cri-o://4bba10727e36c8cbeb45ba5eb6d336d783e8bb0579cee8a3ef1b03ca83c13dab" gracePeriod=30 Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.937975 4766 generic.go:334] "Generic (PLEG): container finished" podID="c27e1f6d-5925-4ca0-89f2-f447afbf845f" containerID="d2330ceeb23365890361dd88cc88eda1b72fe700da0c86ecbb54523ef42aef76" exitCode=0 Jan 02 00:10:56 
crc kubenswrapper[4766]: I0102 00:10:56.938150 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" event={"ID":"c27e1f6d-5925-4ca0-89f2-f447afbf845f","Type":"ContainerDied","Data":"d2330ceeb23365890361dd88cc88eda1b72fe700da0c86ecbb54523ef42aef76"} Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.950338 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-txcw7" podStartSLOduration=3.340609484 podStartE2EDuration="1m8.950319295s" podCreationTimestamp="2026-01-02 00:09:48 +0000 UTC" firstStartedPulling="2026-01-02 00:09:50.152953846 +0000 UTC m=+164.951441000" lastFinishedPulling="2026-01-02 00:10:55.762663657 +0000 UTC m=+230.561150811" observedRunningTime="2026-01-02 00:10:56.949503303 +0000 UTC m=+231.747990457" watchObservedRunningTime="2026-01-02 00:10:56.950319295 +0000 UTC m=+231.748806449" Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.990698 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.993525 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59x7b\" (UniqueName: \"kubernetes.io/projected/82bbd02f-f842-47cf-bea4-2dcd3e78a02e-kube-api-access-59x7b\") pod \"marketplace-operator-79b997595-g8fpt\" (UID: \"82bbd02f-f842-47cf-bea4-2dcd3e78a02e\") " pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.993773 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82bbd02f-f842-47cf-bea4-2dcd3e78a02e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g8fpt\" (UID: \"82bbd02f-f842-47cf-bea4-2dcd3e78a02e\") " pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.994043 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82bbd02f-f842-47cf-bea4-2dcd3e78a02e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g8fpt\" (UID: \"82bbd02f-f842-47cf-bea4-2dcd3e78a02e\") " pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:56 crc kubenswrapper[4766]: I0102 00:10:56.996296 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82bbd02f-f842-47cf-bea4-2dcd3e78a02e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g8fpt\" (UID: \"82bbd02f-f842-47cf-bea4-2dcd3e78a02e\") " pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.002260 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82bbd02f-f842-47cf-bea4-2dcd3e78a02e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g8fpt\" (UID: \"82bbd02f-f842-47cf-bea4-2dcd3e78a02e\") " pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.012382 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59x7b\" (UniqueName: 
\"kubernetes.io/projected/82bbd02f-f842-47cf-bea4-2dcd3e78a02e-kube-api-access-59x7b\") pod \"marketplace-operator-79b997595-g8fpt\" (UID: \"82bbd02f-f842-47cf-bea4-2dcd3e78a02e\") " pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.112521 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.163689 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-czkht"] Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.517949 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.609049 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-catalog-content\") pod \"296488e0-7f00-413d-87fb-d8cecb1b186c\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.609145 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-utilities\") pod \"296488e0-7f00-413d-87fb-d8cecb1b186c\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.609216 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdd79\" (UniqueName: \"kubernetes.io/projected/296488e0-7f00-413d-87fb-d8cecb1b186c-kube-api-access-bdd79\") pod \"296488e0-7f00-413d-87fb-d8cecb1b186c\" (UID: \"296488e0-7f00-413d-87fb-d8cecb1b186c\") " Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.614395 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-utilities" (OuterVolumeSpecName: "utilities") pod "296488e0-7f00-413d-87fb-d8cecb1b186c" (UID: "296488e0-7f00-413d-87fb-d8cecb1b186c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.623829 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/296488e0-7f00-413d-87fb-d8cecb1b186c-kube-api-access-bdd79" (OuterVolumeSpecName: "kube-api-access-bdd79") pod "296488e0-7f00-413d-87fb-d8cecb1b186c" (UID: "296488e0-7f00-413d-87fb-d8cecb1b186c"). InnerVolumeSpecName "kube-api-access-bdd79". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.641307 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "296488e0-7f00-413d-87fb-d8cecb1b186c" (UID: "296488e0-7f00-413d-87fb-d8cecb1b186c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:57 crc kubenswrapper[4766]: E0102 00:10:57.675588 4766 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poded7d2a87_a727_4174_ac4d_746b7d7a29fd.slice/crio-4bba10727e36c8cbeb45ba5eb6d336d783e8bb0579cee8a3ef1b03ca83c13dab.scope\": RecentStats: unable to find data in memory cache]" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.713157 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.713195 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/296488e0-7f00-413d-87fb-d8cecb1b186c-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.713207 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdd79\" (UniqueName: \"kubernetes.io/projected/296488e0-7f00-413d-87fb-d8cecb1b186c-kube-api-access-bdd79\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.745825 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r2cdb"] Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.946206 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerStarted","Data":"bce71262223102a1e221708ed0cb2549b154d42a224262c021bd129659081e5c"} Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.949239 4766 generic.go:334] "Generic (PLEG): container finished" podID="296488e0-7f00-413d-87fb-d8cecb1b186c" containerID="273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f" exitCode=0 Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.949330 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b76n6" event={"ID":"296488e0-7f00-413d-87fb-d8cecb1b186c","Type":"ContainerDied","Data":"273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f"} Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.949357 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b76n6" event={"ID":"296488e0-7f00-413d-87fb-d8cecb1b186c","Type":"ContainerDied","Data":"26b35f1adb2f61ae6af26bfb2553dacdc842142ddd5dd96478e903e4619e1d78"} Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.949374 4766 scope.go:117] "RemoveContainer" containerID="273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.949503 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b76n6" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.975306 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pfq5" event={"ID":"52a3f025-cab4-4df2-851c-a3144fc4e9de","Type":"ContainerStarted","Data":"646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390"} Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.975678 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4pfq5" podUID="52a3f025-cab4-4df2-851c-a3144fc4e9de" containerName="registry-server" containerID="cri-o://646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390" gracePeriod=30 Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.978814 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-txcw7_ed7d2a87-a727-4174-ac4d-746b7d7a29fd/registry-server/0.log" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.979611 4766 generic.go:334] "Generic (PLEG): container finished" podID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" containerID="4bba10727e36c8cbeb45ba5eb6d336d783e8bb0579cee8a3ef1b03ca83c13dab" exitCode=1 Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.979668 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txcw7" event={"ID":"ed7d2a87-a727-4174-ac4d-746b7d7a29fd","Type":"ContainerDied","Data":"4bba10727e36c8cbeb45ba5eb6d336d783e8bb0579cee8a3ef1b03ca83c13dab"} Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.986314 4766 scope.go:117] "RemoveContainer" containerID="a9e81a11e314bb74d009a669061d0d36e8dc052991155cab265eb8bda6ca6f25" Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.991448 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kmkx9" event={"ID":"c5f043c1-da6b-433d-8db2-47b8d31ab74c","Type":"ContainerStarted","Data":"21bdb9499a8a435aea4badd88fe533394542a0a3a8437582000b55a23c25dee7"} Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.991517 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c457j" podUID="0c1f7ccd-9415-4560-968d-f20c5c44df58" containerName="registry-server" containerID="cri-o://e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0" gracePeriod=30 Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.991626 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kmkx9" podUID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" containerName="registry-server" containerID="cri-o://21bdb9499a8a435aea4badd88fe533394542a0a3a8437582000b55a23c25dee7" gracePeriod=30 Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.991893 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-r2cdb" podUID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" containerName="registry-server" containerID="cri-o://97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44" gracePeriod=30 Jan 02 00:10:57 crc kubenswrapper[4766]: I0102 00:10:57.992027 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7prh8" podUID="2298405f-0595-45a3-9586-7ab0c713531b" containerName="registry-server" containerID="cri-o://e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95" 
gracePeriod=30 Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.025426 4766 scope.go:117] "RemoveContainer" containerID="a2c853298d984527b6b9eaf7e0a632c2918be72eac7b8dbfbf0db880a626844f" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.030832 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kmkx9" podStartSLOduration=3.34027037 podStartE2EDuration="1m11.03081331s" podCreationTimestamp="2026-01-02 00:09:47 +0000 UTC" firstStartedPulling="2026-01-02 00:09:49.241441967 +0000 UTC m=+164.039929121" lastFinishedPulling="2026-01-02 00:10:56.931984907 +0000 UTC m=+231.730472061" observedRunningTime="2026-01-02 00:10:58.02824881 +0000 UTC m=+232.826735974" watchObservedRunningTime="2026-01-02 00:10:58.03081331 +0000 UTC m=+232.829300464" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.034262 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4pfq5" podStartSLOduration=4.185556434 podStartE2EDuration="1m13.033985726s" podCreationTimestamp="2026-01-02 00:09:45 +0000 UTC" firstStartedPulling="2026-01-02 00:09:48.099072407 +0000 UTC m=+162.897559561" lastFinishedPulling="2026-01-02 00:10:56.947501699 +0000 UTC m=+231.745988853" observedRunningTime="2026-01-02 00:10:58.007256569 +0000 UTC m=+232.805743723" watchObservedRunningTime="2026-01-02 00:10:58.033985726 +0000 UTC m=+232.832472900" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.050143 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b76n6"] Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.053820 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b76n6"] Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.078042 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g8fpt"] Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.310558 4766 scope.go:117] "RemoveContainer" containerID="273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f" Jan 02 00:10:58 crc kubenswrapper[4766]: E0102 00:10:58.323819 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f\": container with ID starting with 273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f not found: ID does not exist" containerID="273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.323884 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f"} err="failed to get container status \"273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f\": rpc error: code = NotFound desc = could not find container \"273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f\": container with ID starting with 273fd290cc1ba07575271d9531d437b958370a221a54214044ece03be402385f not found: ID does not exist" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.323916 4766 scope.go:117] "RemoveContainer" containerID="a9e81a11e314bb74d009a669061d0d36e8dc052991155cab265eb8bda6ca6f25" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.324365 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-7prh8"] Jan 02 00:10:58 crc kubenswrapper[4766]: E0102 00:10:58.329245 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9e81a11e314bb74d009a669061d0d36e8dc052991155cab265eb8bda6ca6f25\": container with ID starting with a9e81a11e314bb74d009a669061d0d36e8dc052991155cab265eb8bda6ca6f25 not found: ID does not exist" containerID="a9e81a11e314bb74d009a669061d0d36e8dc052991155cab265eb8bda6ca6f25" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.329298 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9e81a11e314bb74d009a669061d0d36e8dc052991155cab265eb8bda6ca6f25"} err="failed to get container status \"a9e81a11e314bb74d009a669061d0d36e8dc052991155cab265eb8bda6ca6f25\": rpc error: code = NotFound desc = could not find container \"a9e81a11e314bb74d009a669061d0d36e8dc052991155cab265eb8bda6ca6f25\": container with ID starting with a9e81a11e314bb74d009a669061d0d36e8dc052991155cab265eb8bda6ca6f25 not found: ID does not exist" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.329326 4766 scope.go:117] "RemoveContainer" containerID="a2c853298d984527b6b9eaf7e0a632c2918be72eac7b8dbfbf0db880a626844f" Jan 02 00:10:58 crc kubenswrapper[4766]: E0102 00:10:58.330381 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2c853298d984527b6b9eaf7e0a632c2918be72eac7b8dbfbf0db880a626844f\": container with ID starting with a2c853298d984527b6b9eaf7e0a632c2918be72eac7b8dbfbf0db880a626844f not found: ID does not exist" containerID="a2c853298d984527b6b9eaf7e0a632c2918be72eac7b8dbfbf0db880a626844f" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.330412 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2c853298d984527b6b9eaf7e0a632c2918be72eac7b8dbfbf0db880a626844f"} err="failed to get container status \"a2c853298d984527b6b9eaf7e0a632c2918be72eac7b8dbfbf0db880a626844f\": rpc error: code = NotFound desc = could not find container \"a2c853298d984527b6b9eaf7e0a632c2918be72eac7b8dbfbf0db880a626844f\": container with ID starting with a2c853298d984527b6b9eaf7e0a632c2918be72eac7b8dbfbf0db880a626844f not found: ID does not exist" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.409035 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.413380 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.428239 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-txcw7_ed7d2a87-a727-4174-ac4d-746b7d7a29fd/registry-server/0.log" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.428982 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.438465 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4pfq5_52a3f025-cab4-4df2-851c-a3144fc4e9de/registry-server/0.log" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.441811 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539425 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-catalog-content\") pod \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539471 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-utilities\") pod \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539497 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptd2x\" (UniqueName: \"kubernetes.io/projected/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-kube-api-access-ptd2x\") pod \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\" (UID: \"ed7d2a87-a727-4174-ac4d-746b7d7a29fd\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539567 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-utilities\") pod \"52a3f025-cab4-4df2-851c-a3144fc4e9de\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539609 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-operator-metrics\") pod \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539642 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-utilities\") pod \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539667 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-catalog-content\") pod \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539699 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9j2q\" (UniqueName: \"kubernetes.io/projected/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-kube-api-access-g9j2q\") pod \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\" (UID: \"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539739 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-catalog-content\") pod \"52a3f025-cab4-4df2-851c-a3144fc4e9de\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539764 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvrpt\" (UniqueName: 
\"kubernetes.io/projected/52a3f025-cab4-4df2-851c-a3144fc4e9de-kube-api-access-kvrpt\") pod \"52a3f025-cab4-4df2-851c-a3144fc4e9de\" (UID: \"52a3f025-cab4-4df2-851c-a3144fc4e9de\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539793 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-trusted-ca\") pod \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.539821 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dz5pf\" (UniqueName: \"kubernetes.io/projected/c27e1f6d-5925-4ca0-89f2-f447afbf845f-kube-api-access-dz5pf\") pod \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\" (UID: \"c27e1f6d-5925-4ca0-89f2-f447afbf845f\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.540500 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-utilities" (OuterVolumeSpecName: "utilities") pod "ed7d2a87-a727-4174-ac4d-746b7d7a29fd" (UID: "ed7d2a87-a727-4174-ac4d-746b7d7a29fd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.540554 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-utilities" (OuterVolumeSpecName: "utilities") pod "52a3f025-cab4-4df2-851c-a3144fc4e9de" (UID: "52a3f025-cab4-4df2-851c-a3144fc4e9de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.541373 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-utilities" (OuterVolumeSpecName: "utilities") pod "45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" (UID: "45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.544930 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-kube-api-access-g9j2q" (OuterVolumeSpecName: "kube-api-access-g9j2q") pod "45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" (UID: "45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d"). InnerVolumeSpecName "kube-api-access-g9j2q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.545367 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "c27e1f6d-5925-4ca0-89f2-f447afbf845f" (UID: "c27e1f6d-5925-4ca0-89f2-f447afbf845f"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.545744 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-kube-api-access-ptd2x" (OuterVolumeSpecName: "kube-api-access-ptd2x") pod "ed7d2a87-a727-4174-ac4d-746b7d7a29fd" (UID: "ed7d2a87-a727-4174-ac4d-746b7d7a29fd"). InnerVolumeSpecName "kube-api-access-ptd2x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.545986 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52a3f025-cab4-4df2-851c-a3144fc4e9de-kube-api-access-kvrpt" (OuterVolumeSpecName: "kube-api-access-kvrpt") pod "52a3f025-cab4-4df2-851c-a3144fc4e9de" (UID: "52a3f025-cab4-4df2-851c-a3144fc4e9de"). InnerVolumeSpecName "kube-api-access-kvrpt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.546250 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c27e1f6d-5925-4ca0-89f2-f447afbf845f-kube-api-access-dz5pf" (OuterVolumeSpecName: "kube-api-access-dz5pf") pod "c27e1f6d-5925-4ca0-89f2-f447afbf845f" (UID: "c27e1f6d-5925-4ca0-89f2-f447afbf845f"). InnerVolumeSpecName "kube-api-access-dz5pf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.563500 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "c27e1f6d-5925-4ca0-89f2-f447afbf845f" (UID: "c27e1f6d-5925-4ca0-89f2-f447afbf845f"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.638545 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "52a3f025-cab4-4df2-851c-a3144fc4e9de" (UID: "52a3f025-cab4-4df2-851c-a3144fc4e9de"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.641176 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.641221 4766 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.641235 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.641247 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9j2q\" (UniqueName: \"kubernetes.io/projected/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-kube-api-access-g9j2q\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.641259 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52a3f025-cab4-4df2-851c-a3144fc4e9de-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.641272 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvrpt\" (UniqueName: \"kubernetes.io/projected/52a3f025-cab4-4df2-851c-a3144fc4e9de-kube-api-access-kvrpt\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.641283 4766 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c27e1f6d-5925-4ca0-89f2-f447afbf845f-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.641295 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dz5pf\" (UniqueName: \"kubernetes.io/projected/c27e1f6d-5925-4ca0-89f2-f447afbf845f-kube-api-access-dz5pf\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.641306 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.641318 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptd2x\" (UniqueName: \"kubernetes.io/projected/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-kube-api-access-ptd2x\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.682975 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c457j" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.686923 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.730864 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed7d2a87-a727-4174-ac4d-746b7d7a29fd" (UID: "ed7d2a87-a727-4174-ac4d-746b7d7a29fd"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.740151 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.743059 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-catalog-content\") pod \"0c1f7ccd-9415-4560-968d-f20c5c44df58\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.743157 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8ppf\" (UniqueName: \"kubernetes.io/projected/0c1f7ccd-9415-4560-968d-f20c5c44df58-kube-api-access-m8ppf\") pod \"0c1f7ccd-9415-4560-968d-f20c5c44df58\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.743230 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-utilities\") pod \"0c1f7ccd-9415-4560-968d-f20c5c44df58\" (UID: \"0c1f7ccd-9415-4560-968d-f20c5c44df58\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.743266 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-utilities\") pod \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.743292 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-catalog-content\") pod \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.743323 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lr74q\" (UniqueName: \"kubernetes.io/projected/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-kube-api-access-lr74q\") pod \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\" (UID: \"d04a5cef-a04c-4451-9b0f-ba9dea4877f2\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.743663 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed7d2a87-a727-4174-ac4d-746b7d7a29fd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.744578 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-utilities" (OuterVolumeSpecName: "utilities") pod "d04a5cef-a04c-4451-9b0f-ba9dea4877f2" (UID: "d04a5cef-a04c-4451-9b0f-ba9dea4877f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.748199 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-utilities" (OuterVolumeSpecName: "utilities") pod "0c1f7ccd-9415-4560-968d-f20c5c44df58" (UID: "0c1f7ccd-9415-4560-968d-f20c5c44df58"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.752400 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c1f7ccd-9415-4560-968d-f20c5c44df58-kube-api-access-m8ppf" (OuterVolumeSpecName: "kube-api-access-m8ppf") pod "0c1f7ccd-9415-4560-968d-f20c5c44df58" (UID: "0c1f7ccd-9415-4560-968d-f20c5c44df58"). InnerVolumeSpecName "kube-api-access-m8ppf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.756212 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-kube-api-access-lr74q" (OuterVolumeSpecName: "kube-api-access-lr74q") pod "d04a5cef-a04c-4451-9b0f-ba9dea4877f2" (UID: "d04a5cef-a04c-4451-9b0f-ba9dea4877f2"). InnerVolumeSpecName "kube-api-access-lr74q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.759671 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" (UID: "45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.820258 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d04a5cef-a04c-4451-9b0f-ba9dea4877f2" (UID: "d04a5cef-a04c-4451-9b0f-ba9dea4877f2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.842731 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0c1f7ccd-9415-4560-968d-f20c5c44df58" (UID: "0c1f7ccd-9415-4560-968d-f20c5c44df58"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.844419 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-utilities\") pod \"2298405f-0595-45a3-9586-7ab0c713531b\" (UID: \"2298405f-0595-45a3-9586-7ab0c713531b\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.844526 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-catalog-content\") pod \"2298405f-0595-45a3-9586-7ab0c713531b\" (UID: \"2298405f-0595-45a3-9586-7ab0c713531b\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.844580 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s948f\" (UniqueName: \"kubernetes.io/projected/2298405f-0595-45a3-9586-7ab0c713531b-kube-api-access-s948f\") pod \"2298405f-0595-45a3-9586-7ab0c713531b\" (UID: \"2298405f-0595-45a3-9586-7ab0c713531b\") " Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.844859 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.844877 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.844888 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8ppf\" (UniqueName: \"kubernetes.io/projected/0c1f7ccd-9415-4560-968d-f20c5c44df58-kube-api-access-m8ppf\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.844898 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c1f7ccd-9415-4560-968d-f20c5c44df58-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.844906 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.844914 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.844922 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lr74q\" (UniqueName: \"kubernetes.io/projected/d04a5cef-a04c-4451-9b0f-ba9dea4877f2-kube-api-access-lr74q\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.845260 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-utilities" (OuterVolumeSpecName: "utilities") pod "2298405f-0595-45a3-9586-7ab0c713531b" (UID: "2298405f-0595-45a3-9586-7ab0c713531b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.849066 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2298405f-0595-45a3-9586-7ab0c713531b-kube-api-access-s948f" (OuterVolumeSpecName: "kube-api-access-s948f") pod "2298405f-0595-45a3-9586-7ab0c713531b" (UID: "2298405f-0595-45a3-9586-7ab0c713531b"). InnerVolumeSpecName "kube-api-access-s948f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.917112 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2298405f-0595-45a3-9586-7ab0c713531b" (UID: "2298405f-0595-45a3-9586-7ab0c713531b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.946014 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s948f\" (UniqueName: \"kubernetes.io/projected/2298405f-0595-45a3-9586-7ab0c713531b-kube-api-access-s948f\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.946044 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:58 crc kubenswrapper[4766]: I0102 00:10:58.946061 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2298405f-0595-45a3-9586-7ab0c713531b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.000019 4766 generic.go:334] "Generic (PLEG): container finished" podID="0c1f7ccd-9415-4560-968d-f20c5c44df58" containerID="e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0" exitCode=0 Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.000066 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c457j" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.000093 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c457j" event={"ID":"0c1f7ccd-9415-4560-968d-f20c5c44df58","Type":"ContainerDied","Data":"e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.000118 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c457j" event={"ID":"0c1f7ccd-9415-4560-968d-f20c5c44df58","Type":"ContainerDied","Data":"dd6aaafbd47ed961bb9ce9c91f9b1e3b1e9c57d010a1a57dc05b41ab6e9589c9"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.000141 4766 scope.go:117] "RemoveContainer" containerID="e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.002388 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" event={"ID":"82bbd02f-f842-47cf-bea4-2dcd3e78a02e","Type":"ContainerStarted","Data":"237d8dcadc188ece9510352a245411b2746bfe8b6dd52f3452436637240a6dcd"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.002450 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" event={"ID":"82bbd02f-f842-47cf-bea4-2dcd3e78a02e","Type":"ContainerStarted","Data":"af38c484a2b45cd482e48bdd075f4c5da2cea99dd58269ca04745f36d053ca83"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.002483 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.003652 4766 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-g8fpt container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" start-of-body= Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.003828 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" podUID="82bbd02f-f842-47cf-bea4-2dcd3e78a02e" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.006911 4766 generic.go:334] "Generic (PLEG): container finished" podID="2298405f-0595-45a3-9586-7ab0c713531b" containerID="e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95" exitCode=0 Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.006982 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7prh8" event={"ID":"2298405f-0595-45a3-9586-7ab0c713531b","Type":"ContainerDied","Data":"e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.007011 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7prh8" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.007014 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7prh8" event={"ID":"2298405f-0595-45a3-9586-7ab0c713531b","Type":"ContainerDied","Data":"29236b6a2a3a6d265eef7e4590c1a392666f7f2c96a12acfb11896a3acc9f007"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.009181 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bjfhq" event={"ID":"45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d","Type":"ContainerDied","Data":"fbe8753f9f5150c90c403c79a7ce0b90d25c6a1a7506ad472a43dcbbe4bfc1ee"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.009446 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bjfhq" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.012051 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4pfq5_52a3f025-cab4-4df2-851c-a3144fc4e9de/registry-server/0.log" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.013088 4766 generic.go:334] "Generic (PLEG): container finished" podID="52a3f025-cab4-4df2-851c-a3144fc4e9de" containerID="646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390" exitCode=1 Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.013147 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4pfq5" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.013182 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pfq5" event={"ID":"52a3f025-cab4-4df2-851c-a3144fc4e9de","Type":"ContainerDied","Data":"646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.013219 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pfq5" event={"ID":"52a3f025-cab4-4df2-851c-a3144fc4e9de","Type":"ContainerDied","Data":"32272cdc0c39b98cb6562b6e1cd2e3d0ac9623074dd539241a307e0a13022caf"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.018054 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-txcw7_ed7d2a87-a727-4174-ac4d-746b7d7a29fd/registry-server/0.log" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.023022 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-txcw7" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.023135 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txcw7" event={"ID":"ed7d2a87-a727-4174-ac4d-746b7d7a29fd","Type":"ContainerDied","Data":"3f67b829109217e7e868ff876b93b9ca5047f51d2fd5370941c468da7bc0ca8d"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.023604 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" podStartSLOduration=3.023593569 podStartE2EDuration="3.023593569s" podCreationTimestamp="2026-01-02 00:10:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:10:59.020740202 +0000 UTC m=+233.819227356" watchObservedRunningTime="2026-01-02 00:10:59.023593569 +0000 UTC m=+233.822080723" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.027067 4766 scope.go:117] "RemoveContainer" containerID="e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.030793 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kmkx9_c5f043c1-da6b-433d-8db2-47b8d31ab74c/registry-server/0.log" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.031787 4766 generic.go:334] "Generic (PLEG): container finished" podID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" containerID="21bdb9499a8a435aea4badd88fe533394542a0a3a8437582000b55a23c25dee7" exitCode=1 Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.031886 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kmkx9" event={"ID":"c5f043c1-da6b-433d-8db2-47b8d31ab74c","Type":"ContainerDied","Data":"21bdb9499a8a435aea4badd88fe533394542a0a3a8437582000b55a23c25dee7"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.036379 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.036531 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qlzhc" event={"ID":"c27e1f6d-5925-4ca0-89f2-f447afbf845f","Type":"ContainerDied","Data":"dac2740a9d23a0d280670afc5c9de0f651f8c0e9d02a8c3e2e036bf59914c247"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.039602 4766 generic.go:334] "Generic (PLEG): container finished" podID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" containerID="97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44" exitCode=0 Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.039889 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2cdb" event={"ID":"d04a5cef-a04c-4451-9b0f-ba9dea4877f2","Type":"ContainerDied","Data":"97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.039945 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2cdb" event={"ID":"d04a5cef-a04c-4451-9b0f-ba9dea4877f2","Type":"ContainerDied","Data":"b5aca1d39e3cdd8cbbf2c1eca98f58ea4e164419d6918f3834035cb39b572067"} Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.040303 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r2cdb" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.059317 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c457j"] Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.072027 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c457j"] Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.080239 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7prh8"] Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.084154 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7prh8"] Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.087778 4766 scope.go:117] "RemoveContainer" containerID="74b7c8b8d7a4af366ca427acc34a769a7581e37ffec6379bd531d9b7016a6c1b" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.114641 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bjfhq"] Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.118592 4766 scope.go:117] "RemoveContainer" containerID="e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0" Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.120058 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0\": container with ID starting with e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0 not found: ID does not exist" containerID="e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.120112 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0"} err="failed to get container status \"e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0\": rpc error: code = NotFound desc = could not find container \"e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0\": container with ID starting with e0049eae8534d2267dc4d0cd55b93c7b66df176ffca906fea39adea194daf1f0 not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.120164 4766 scope.go:117] "RemoveContainer" containerID="e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450" Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.120976 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450\": container with ID starting with e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450 not found: ID does not exist" containerID="e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.121011 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450"} err="failed to get container status \"e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450\": rpc error: code = NotFound desc = could not find container \"e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450\": container with ID starting with 
e811c49cb37624838e8c92de434c42d1160792ef1e03e03a8197ad7810cdc450 not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.121031 4766 scope.go:117] "RemoveContainer" containerID="74b7c8b8d7a4af366ca427acc34a769a7581e37ffec6379bd531d9b7016a6c1b" Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.121286 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74b7c8b8d7a4af366ca427acc34a769a7581e37ffec6379bd531d9b7016a6c1b\": container with ID starting with 74b7c8b8d7a4af366ca427acc34a769a7581e37ffec6379bd531d9b7016a6c1b not found: ID does not exist" containerID="74b7c8b8d7a4af366ca427acc34a769a7581e37ffec6379bd531d9b7016a6c1b" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.121307 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74b7c8b8d7a4af366ca427acc34a769a7581e37ffec6379bd531d9b7016a6c1b"} err="failed to get container status \"74b7c8b8d7a4af366ca427acc34a769a7581e37ffec6379bd531d9b7016a6c1b\": rpc error: code = NotFound desc = could not find container \"74b7c8b8d7a4af366ca427acc34a769a7581e37ffec6379bd531d9b7016a6c1b\": container with ID starting with 74b7c8b8d7a4af366ca427acc34a769a7581e37ffec6379bd531d9b7016a6c1b not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.121323 4766 scope.go:117] "RemoveContainer" containerID="e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.145134 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bjfhq"] Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.153327 4766 scope.go:117] "RemoveContainer" containerID="0feb848ebf5d3459059a696026c637a61861bf53ba5ebad3a916148f775cf550" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.155852 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-txcw7"] Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.176683 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-txcw7"] Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.179072 4766 scope.go:117] "RemoveContainer" containerID="9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.185111 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r2cdb"] Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.188040 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-r2cdb"] Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.191001 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4pfq5"] Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.197730 4766 scope.go:117] "RemoveContainer" containerID="e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.198914 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4pfq5"] Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.201459 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95\": container with ID starting with 
e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95 not found: ID does not exist" containerID="e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.201550 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95"} err="failed to get container status \"e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95\": rpc error: code = NotFound desc = could not find container \"e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95\": container with ID starting with e1089b741cb0b75dc2fca3fb03ee9747a978a4f8af4a6baeb53567a178a5fd95 not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.201596 4766 scope.go:117] "RemoveContainer" containerID="0feb848ebf5d3459059a696026c637a61861bf53ba5ebad3a916148f775cf550" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.204963 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qlzhc"] Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.205290 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0feb848ebf5d3459059a696026c637a61861bf53ba5ebad3a916148f775cf550\": container with ID starting with 0feb848ebf5d3459059a696026c637a61861bf53ba5ebad3a916148f775cf550 not found: ID does not exist" containerID="0feb848ebf5d3459059a696026c637a61861bf53ba5ebad3a916148f775cf550" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.205317 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0feb848ebf5d3459059a696026c637a61861bf53ba5ebad3a916148f775cf550"} err="failed to get container status \"0feb848ebf5d3459059a696026c637a61861bf53ba5ebad3a916148f775cf550\": rpc error: code = NotFound desc = could not find container \"0feb848ebf5d3459059a696026c637a61861bf53ba5ebad3a916148f775cf550\": container with ID starting with 0feb848ebf5d3459059a696026c637a61861bf53ba5ebad3a916148f775cf550 not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.205338 4766 scope.go:117] "RemoveContainer" containerID="9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.207576 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qlzhc"] Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.208160 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017\": container with ID starting with 9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017 not found: ID does not exist" containerID="9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.208212 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017"} err="failed to get container status \"9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017\": rpc error: code = NotFound desc = could not find container \"9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017\": container with ID starting with 
9ae267a4b2fd6bfb396e19c5c437fafdf20848010a79bbf7a00af4cbe682f017 not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.208237 4766 scope.go:117] "RemoveContainer" containerID="186f54861fecf54c118749b7ce1225dbcee76df84492e37e26e8822c341e2a09" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.269303 4766 scope.go:117] "RemoveContainer" containerID="8ed8c74dd9458cf7ce8d644fe3a34ce1ad9c53b83c05bd8c742074016f139fef" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.313111 4766 scope.go:117] "RemoveContainer" containerID="90be7d0344c3e88e26f8c5396641682cfaa2e12b2e08df62012ed90d31247556" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.362042 4766 scope.go:117] "RemoveContainer" containerID="646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.383119 4766 scope.go:117] "RemoveContainer" containerID="3d05f34110e01546e2b50029c653ac7c19ef33f3d727899f8ed01222d5da5807" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.403136 4766 scope.go:117] "RemoveContainer" containerID="3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.423805 4766 scope.go:117] "RemoveContainer" containerID="646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390" Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.435419 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390\": container with ID starting with 646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390 not found: ID does not exist" containerID="646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.435471 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390"} err="failed to get container status \"646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390\": rpc error: code = NotFound desc = could not find container \"646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390\": container with ID starting with 646229f4622c1eac4e6dffd202c7e2bc35cfcff0144225664840d6d436143390 not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.435505 4766 scope.go:117] "RemoveContainer" containerID="3d05f34110e01546e2b50029c653ac7c19ef33f3d727899f8ed01222d5da5807" Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.435879 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d05f34110e01546e2b50029c653ac7c19ef33f3d727899f8ed01222d5da5807\": container with ID starting with 3d05f34110e01546e2b50029c653ac7c19ef33f3d727899f8ed01222d5da5807 not found: ID does not exist" containerID="3d05f34110e01546e2b50029c653ac7c19ef33f3d727899f8ed01222d5da5807" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.435960 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d05f34110e01546e2b50029c653ac7c19ef33f3d727899f8ed01222d5da5807"} err="failed to get container status \"3d05f34110e01546e2b50029c653ac7c19ef33f3d727899f8ed01222d5da5807\": rpc error: code = NotFound desc = could not find container \"3d05f34110e01546e2b50029c653ac7c19ef33f3d727899f8ed01222d5da5807\": container with ID starting with 
3d05f34110e01546e2b50029c653ac7c19ef33f3d727899f8ed01222d5da5807 not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.435990 4766 scope.go:117] "RemoveContainer" containerID="3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a" Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.436711 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a\": container with ID starting with 3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a not found: ID does not exist" containerID="3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.436780 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a"} err="failed to get container status \"3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a\": rpc error: code = NotFound desc = could not find container \"3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a\": container with ID starting with 3043c8962d1b6d345cb94590278f434967dad21e75bebb1baed7040b8575043a not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.436803 4766 scope.go:117] "RemoveContainer" containerID="4bba10727e36c8cbeb45ba5eb6d336d783e8bb0579cee8a3ef1b03ca83c13dab" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.471967 4766 scope.go:117] "RemoveContainer" containerID="80c05e285dd1bec5b9c4b9820db74a7eb11353fac6d654b24a8ec7780e865352" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.490997 4766 scope.go:117] "RemoveContainer" containerID="97d70c20e19bb593800ecbfbed35d2b90096f8dea5af56666467e9bd68537428" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.518324 4766 scope.go:117] "RemoveContainer" containerID="d2330ceeb23365890361dd88cc88eda1b72fe700da0c86ecbb54523ef42aef76" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.537220 4766 scope.go:117] "RemoveContainer" containerID="97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.541782 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c1f7ccd-9415-4560-968d-f20c5c44df58" path="/var/lib/kubelet/pods/0c1f7ccd-9415-4560-968d-f20c5c44df58/volumes" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.542761 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2298405f-0595-45a3-9586-7ab0c713531b" path="/var/lib/kubelet/pods/2298405f-0595-45a3-9586-7ab0c713531b/volumes" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.543654 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="296488e0-7f00-413d-87fb-d8cecb1b186c" path="/var/lib/kubelet/pods/296488e0-7f00-413d-87fb-d8cecb1b186c/volumes" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.544976 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" path="/var/lib/kubelet/pods/45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d/volumes" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.545622 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52a3f025-cab4-4df2-851c-a3144fc4e9de" path="/var/lib/kubelet/pods/52a3f025-cab4-4df2-851c-a3144fc4e9de/volumes" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.546722 4766 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c27e1f6d-5925-4ca0-89f2-f447afbf845f" path="/var/lib/kubelet/pods/c27e1f6d-5925-4ca0-89f2-f447afbf845f/volumes" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.547222 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" path="/var/lib/kubelet/pods/d04a5cef-a04c-4451-9b0f-ba9dea4877f2/volumes" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.548064 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" path="/var/lib/kubelet/pods/ed7d2a87-a727-4174-ac4d-746b7d7a29fd/volumes" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.551755 4766 scope.go:117] "RemoveContainer" containerID="c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.566629 4766 scope.go:117] "RemoveContainer" containerID="e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.577823 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kmkx9_c5f043c1-da6b-433d-8db2-47b8d31ab74c/registry-server/0.log" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.578561 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.583835 4766 scope.go:117] "RemoveContainer" containerID="97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44" Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.584250 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44\": container with ID starting with 97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44 not found: ID does not exist" containerID="97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.584278 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44"} err="failed to get container status \"97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44\": rpc error: code = NotFound desc = could not find container \"97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44\": container with ID starting with 97ab39af3402bc54d16c1d84fb4df1267890e99d41d5d488b4d88cd80703bf44 not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.584297 4766 scope.go:117] "RemoveContainer" containerID="c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e" Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.584500 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e\": container with ID starting with c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e not found: ID does not exist" containerID="c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.584523 4766 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e"} err="failed to get container status \"c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e\": rpc error: code = NotFound desc = could not find container \"c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e\": container with ID starting with c7222d7e76060d52cc36bae838c753e5e997677a1457dca68ab414feaf6f610e not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.584540 4766 scope.go:117] "RemoveContainer" containerID="e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd" Jan 02 00:10:59 crc kubenswrapper[4766]: E0102 00:10:59.584737 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd\": container with ID starting with e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd not found: ID does not exist" containerID="e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.584757 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd"} err="failed to get container status \"e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd\": rpc error: code = NotFound desc = could not find container \"e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd\": container with ID starting with e6963ccde9abcd12603d54ff0dec2bbebe24cf4764349ede9d8e032ac9c6abdd not found: ID does not exist" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.655277 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8sf6\" (UniqueName: \"kubernetes.io/projected/c5f043c1-da6b-433d-8db2-47b8d31ab74c-kube-api-access-l8sf6\") pod \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.655398 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-utilities\") pod \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.655433 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-catalog-content\") pod \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\" (UID: \"c5f043c1-da6b-433d-8db2-47b8d31ab74c\") " Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.660351 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-utilities" (OuterVolumeSpecName: "utilities") pod "c5f043c1-da6b-433d-8db2-47b8d31ab74c" (UID: "c5f043c1-da6b-433d-8db2-47b8d31ab74c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.660275 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5f043c1-da6b-433d-8db2-47b8d31ab74c-kube-api-access-l8sf6" (OuterVolumeSpecName: "kube-api-access-l8sf6") pod "c5f043c1-da6b-433d-8db2-47b8d31ab74c" (UID: "c5f043c1-da6b-433d-8db2-47b8d31ab74c"). InnerVolumeSpecName "kube-api-access-l8sf6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.679439 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5f043c1-da6b-433d-8db2-47b8d31ab74c" (UID: "c5f043c1-da6b-433d-8db2-47b8d31ab74c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.757411 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.757462 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5f043c1-da6b-433d-8db2-47b8d31ab74c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:10:59 crc kubenswrapper[4766]: I0102 00:10:59.757476 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8sf6\" (UniqueName: \"kubernetes.io/projected/c5f043c1-da6b-433d-8db2-47b8d31ab74c-kube-api-access-l8sf6\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.047966 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kmkx9_c5f043c1-da6b-433d-8db2-47b8d31ab74c/registry-server/0.log" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.048649 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kmkx9" event={"ID":"c5f043c1-da6b-433d-8db2-47b8d31ab74c","Type":"ContainerDied","Data":"2b31100575ae9bd08e3f3b1aa7046ae0e8a73742fd5ed47d755379797887ea8e"} Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.048716 4766 scope.go:117] "RemoveContainer" containerID="21bdb9499a8a435aea4badd88fe533394542a0a3a8437582000b55a23c25dee7" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.048739 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kmkx9" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.057990 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-g8fpt" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.066307 4766 scope.go:117] "RemoveContainer" containerID="9bbd2e830a5218dec2c9bc2958ca9d2b69c51eb81c680c26a591a2961a0c3aad" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.086863 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kmkx9"] Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.095307 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kmkx9"] Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.111549 4766 scope.go:117] "RemoveContainer" containerID="f6461bc691570b6e6c4125bf5039f74afcdf0bf920fdf22d4ff4fd1c4972c5cf" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.722819 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5tjl2"] Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723115 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2298405f-0595-45a3-9586-7ab0c713531b" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723130 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="2298405f-0595-45a3-9586-7ab0c713531b" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723140 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52a3f025-cab4-4df2-851c-a3144fc4e9de" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723148 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="52a3f025-cab4-4df2-851c-a3144fc4e9de" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723162 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723172 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723181 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296488e0-7f00-413d-87fb-d8cecb1b186c" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723188 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="296488e0-7f00-413d-87fb-d8cecb1b186c" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723199 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723207 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723216 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723223 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" containerName="extract-content" Jan 02 00:11:00 
crc kubenswrapper[4766]: E0102 00:11:00.723231 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723239 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723249 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52a3f025-cab4-4df2-851c-a3144fc4e9de" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723257 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="52a3f025-cab4-4df2-851c-a3144fc4e9de" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723269 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2298405f-0595-45a3-9586-7ab0c713531b" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723276 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="2298405f-0595-45a3-9586-7ab0c713531b" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723284 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723291 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723301 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723309 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723315 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2298405f-0595-45a3-9586-7ab0c713531b" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723322 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="2298405f-0595-45a3-9586-7ab0c713531b" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723331 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296488e0-7f00-413d-87fb-d8cecb1b186c" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723337 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="296488e0-7f00-413d-87fb-d8cecb1b186c" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723344 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296488e0-7f00-413d-87fb-d8cecb1b186c" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723350 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="296488e0-7f00-413d-87fb-d8cecb1b186c" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723358 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723364 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" 
containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723370 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723376 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723384 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52a3f025-cab4-4df2-851c-a3144fc4e9de" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723390 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="52a3f025-cab4-4df2-851c-a3144fc4e9de" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723397 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723403 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723412 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723417 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723424 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c1f7ccd-9415-4560-968d-f20c5c44df58" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723431 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c1f7ccd-9415-4560-968d-f20c5c44df58" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723438 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723443 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" containerName="extract-content" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723451 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c1f7ccd-9415-4560-968d-f20c5c44df58" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723456 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c1f7ccd-9415-4560-968d-f20c5c44df58" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723463 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c27e1f6d-5925-4ca0-89f2-f447afbf845f" containerName="marketplace-operator" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723470 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="c27e1f6d-5925-4ca0-89f2-f447afbf845f" containerName="marketplace-operator" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723515 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c1f7ccd-9415-4560-968d-f20c5c44df58" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723521 4766 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0c1f7ccd-9415-4560-968d-f20c5c44df58" containerName="extract-utilities" Jan 02 00:11:00 crc kubenswrapper[4766]: E0102 00:11:00.723527 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723533 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723641 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c1f7ccd-9415-4560-968d-f20c5c44df58" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723650 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="45d4fb2f-a3b7-4135-8dcf-cdaba0aef58d" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723658 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed7d2a87-a727-4174-ac4d-746b7d7a29fd" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723667 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723674 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="c27e1f6d-5925-4ca0-89f2-f447afbf845f" containerName="marketplace-operator" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723682 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="d04a5cef-a04c-4451-9b0f-ba9dea4877f2" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723691 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="2298405f-0595-45a3-9586-7ab0c713531b" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723699 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="296488e0-7f00-413d-87fb-d8cecb1b186c" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.723708 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="52a3f025-cab4-4df2-851c-a3144fc4e9de" containerName="registry-server" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.724742 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.732893 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.736735 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5tjl2"] Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.769640 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d021b9f-72be-49d6-9fc8-5cc94e6e33ca-utilities\") pod \"redhat-operators-5tjl2\" (UID: \"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca\") " pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.769696 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhpmz\" (UniqueName: \"kubernetes.io/projected/0d021b9f-72be-49d6-9fc8-5cc94e6e33ca-kube-api-access-rhpmz\") pod \"redhat-operators-5tjl2\" (UID: \"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca\") " pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.769824 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d021b9f-72be-49d6-9fc8-5cc94e6e33ca-catalog-content\") pod \"redhat-operators-5tjl2\" (UID: \"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca\") " pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.870668 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d021b9f-72be-49d6-9fc8-5cc94e6e33ca-utilities\") pod \"redhat-operators-5tjl2\" (UID: \"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca\") " pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.870714 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhpmz\" (UniqueName: \"kubernetes.io/projected/0d021b9f-72be-49d6-9fc8-5cc94e6e33ca-kube-api-access-rhpmz\") pod \"redhat-operators-5tjl2\" (UID: \"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca\") " pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.870768 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d021b9f-72be-49d6-9fc8-5cc94e6e33ca-catalog-content\") pod \"redhat-operators-5tjl2\" (UID: \"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca\") " pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.871175 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d021b9f-72be-49d6-9fc8-5cc94e6e33ca-utilities\") pod \"redhat-operators-5tjl2\" (UID: \"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca\") " pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.871200 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d021b9f-72be-49d6-9fc8-5cc94e6e33ca-catalog-content\") pod \"redhat-operators-5tjl2\" (UID: \"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca\") " 
pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:00 crc kubenswrapper[4766]: I0102 00:11:00.890039 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhpmz\" (UniqueName: \"kubernetes.io/projected/0d021b9f-72be-49d6-9fc8-5cc94e6e33ca-kube-api-access-rhpmz\") pod \"redhat-operators-5tjl2\" (UID: \"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca\") " pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.005592 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7bdbd88689-kjxjr"] Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.005883 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" podUID="62c6bad5-b417-4f5c-aa08-737a3b1e8b57" containerName="controller-manager" containerID="cri-o://f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b" gracePeriod=30 Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.047602 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.107713 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6"] Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.107973 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" podUID="ba296a10-72d7-4719-9fcf-31bc93249ee2" containerName="route-controller-manager" containerID="cri-o://dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae" gracePeriod=30 Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.480105 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5tjl2"] Jan 02 00:11:01 crc kubenswrapper[4766]: W0102 00:11:01.488526 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d021b9f_72be_49d6_9fc8_5cc94e6e33ca.slice/crio-05d6557a0bbed9870d843ab8c89b128cba4b4b33ffbfedc5f064c266eb8dcdca WatchSource:0}: Error finding container 05d6557a0bbed9870d843ab8c89b128cba4b4b33ffbfedc5f064c266eb8dcdca: Status 404 returned error can't find the container with id 05d6557a0bbed9870d843ab8c89b128cba4b4b33ffbfedc5f064c266eb8dcdca Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.541048 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5f043c1-da6b-433d-8db2-47b8d31ab74c" path="/var/lib/kubelet/pods/c5f043c1-da6b-433d-8db2-47b8d31ab74c/volumes" Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.543815 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.580474 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba296a10-72d7-4719-9fcf-31bc93249ee2-serving-cert\") pod \"ba296a10-72d7-4719-9fcf-31bc93249ee2\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.580555 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-config\") pod \"ba296a10-72d7-4719-9fcf-31bc93249ee2\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.580591 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxzzh\" (UniqueName: \"kubernetes.io/projected/ba296a10-72d7-4719-9fcf-31bc93249ee2-kube-api-access-nxzzh\") pod \"ba296a10-72d7-4719-9fcf-31bc93249ee2\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.580635 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-client-ca\") pod \"ba296a10-72d7-4719-9fcf-31bc93249ee2\" (UID: \"ba296a10-72d7-4719-9fcf-31bc93249ee2\") " Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.581619 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-client-ca" (OuterVolumeSpecName: "client-ca") pod "ba296a10-72d7-4719-9fcf-31bc93249ee2" (UID: "ba296a10-72d7-4719-9fcf-31bc93249ee2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.581642 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-config" (OuterVolumeSpecName: "config") pod "ba296a10-72d7-4719-9fcf-31bc93249ee2" (UID: "ba296a10-72d7-4719-9fcf-31bc93249ee2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.585882 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba296a10-72d7-4719-9fcf-31bc93249ee2-kube-api-access-nxzzh" (OuterVolumeSpecName: "kube-api-access-nxzzh") pod "ba296a10-72d7-4719-9fcf-31bc93249ee2" (UID: "ba296a10-72d7-4719-9fcf-31bc93249ee2"). InnerVolumeSpecName "kube-api-access-nxzzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.588956 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba296a10-72d7-4719-9fcf-31bc93249ee2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ba296a10-72d7-4719-9fcf-31bc93249ee2" (UID: "ba296a10-72d7-4719-9fcf-31bc93249ee2"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.682740 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba296a10-72d7-4719-9fcf-31bc93249ee2-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.682774 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.682784 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxzzh\" (UniqueName: \"kubernetes.io/projected/ba296a10-72d7-4719-9fcf-31bc93249ee2-kube-api-access-nxzzh\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:01 crc kubenswrapper[4766]: I0102 00:11:01.682794 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ba296a10-72d7-4719-9fcf-31bc93249ee2-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.023041 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.068138 4766 generic.go:334] "Generic (PLEG): container finished" podID="0d021b9f-72be-49d6-9fc8-5cc94e6e33ca" containerID="a82f59290341fe996e98a30d813566618f6bbc56e6b90c3d3ba7eb41d2b14496" exitCode=0 Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.069009 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tjl2" event={"ID":"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca","Type":"ContainerDied","Data":"a82f59290341fe996e98a30d813566618f6bbc56e6b90c3d3ba7eb41d2b14496"} Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.069046 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tjl2" event={"ID":"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca","Type":"ContainerStarted","Data":"05d6557a0bbed9870d843ab8c89b128cba4b4b33ffbfedc5f064c266eb8dcdca"} Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.077309 4766 generic.go:334] "Generic (PLEG): container finished" podID="ba296a10-72d7-4719-9fcf-31bc93249ee2" containerID="dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae" exitCode=0 Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.077383 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" event={"ID":"ba296a10-72d7-4719-9fcf-31bc93249ee2","Type":"ContainerDied","Data":"dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae"} Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.077411 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" event={"ID":"ba296a10-72d7-4719-9fcf-31bc93249ee2","Type":"ContainerDied","Data":"3dc8c65325f2c25c8bfc630f206d3e026c2e1d10b08740deee03ad9b7f92c3b2"} Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.077427 4766 scope.go:117] "RemoveContainer" containerID="dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.077535 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.087385 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb7km\" (UniqueName: \"kubernetes.io/projected/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-kube-api-access-rb7km\") pod \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.087471 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-serving-cert\") pod \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.087522 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-client-ca\") pod \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.087608 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-config\") pod \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.087660 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-proxy-ca-bundles\") pod \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\" (UID: \"62c6bad5-b417-4f5c-aa08-737a3b1e8b57\") " Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.088977 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-config" (OuterVolumeSpecName: "config") pod "62c6bad5-b417-4f5c-aa08-737a3b1e8b57" (UID: "62c6bad5-b417-4f5c-aa08-737a3b1e8b57"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.089195 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "62c6bad5-b417-4f5c-aa08-737a3b1e8b57" (UID: "62c6bad5-b417-4f5c-aa08-737a3b1e8b57"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.090877 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-client-ca" (OuterVolumeSpecName: "client-ca") pod "62c6bad5-b417-4f5c-aa08-737a3b1e8b57" (UID: "62c6bad5-b417-4f5c-aa08-737a3b1e8b57"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.093579 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "62c6bad5-b417-4f5c-aa08-737a3b1e8b57" (UID: "62c6bad5-b417-4f5c-aa08-737a3b1e8b57"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.093744 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-kube-api-access-rb7km" (OuterVolumeSpecName: "kube-api-access-rb7km") pod "62c6bad5-b417-4f5c-aa08-737a3b1e8b57" (UID: "62c6bad5-b417-4f5c-aa08-737a3b1e8b57"). InnerVolumeSpecName "kube-api-access-rb7km". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.101667 4766 generic.go:334] "Generic (PLEG): container finished" podID="62c6bad5-b417-4f5c-aa08-737a3b1e8b57" containerID="f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b" exitCode=0 Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.101705 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" event={"ID":"62c6bad5-b417-4f5c-aa08-737a3b1e8b57","Type":"ContainerDied","Data":"f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b"} Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.101733 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" event={"ID":"62c6bad5-b417-4f5c-aa08-737a3b1e8b57","Type":"ContainerDied","Data":"33ff14dcb481ebbc4a08f6490a54b8d2f0d03ba5b7ea61a7f5777d60420effc9"} Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.101779 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7bdbd88689-kjxjr" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.124011 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vwvkb"] Jan 02 00:11:02 crc kubenswrapper[4766]: E0102 00:11:02.124278 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba296a10-72d7-4719-9fcf-31bc93249ee2" containerName="route-controller-manager" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.124296 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba296a10-72d7-4719-9fcf-31bc93249ee2" containerName="route-controller-manager" Jan 02 00:11:02 crc kubenswrapper[4766]: E0102 00:11:02.124312 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c6bad5-b417-4f5c-aa08-737a3b1e8b57" containerName="controller-manager" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.124320 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c6bad5-b417-4f5c-aa08-737a3b1e8b57" containerName="controller-manager" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.124417 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba296a10-72d7-4719-9fcf-31bc93249ee2" containerName="route-controller-manager" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.124426 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c6bad5-b417-4f5c-aa08-737a3b1e8b57" containerName="controller-manager" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.125215 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.128837 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.134569 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vwvkb"] Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.145778 4766 scope.go:117] "RemoveContainer" containerID="dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae" Jan 02 00:11:02 crc kubenswrapper[4766]: E0102 00:11:02.146231 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae\": container with ID starting with dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae not found: ID does not exist" containerID="dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.146356 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae"} err="failed to get container status \"dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae\": rpc error: code = NotFound desc = could not find container \"dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae\": container with ID starting with dcc08280ab5a3bca61ebaa0e584f8adcb487d25e799a8c428657ac903139a8ae not found: ID does not exist" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.146379 4766 scope.go:117] "RemoveContainer" containerID="f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.165020 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6"] Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.167379 4766 scope.go:117] "RemoveContainer" containerID="f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b" Jan 02 00:11:02 crc kubenswrapper[4766]: E0102 00:11:02.167804 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b\": container with ID starting with f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b not found: ID does not exist" containerID="f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.167868 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b"} err="failed to get container status \"f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b\": rpc error: code = NotFound desc = could not find container \"f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b\": container with ID starting with f7e8fc0d5c58e1eaf99121b82c6d012216aabcf7b6843919ee87eca08f5b7a9b not found: ID does not exist" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.169110 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795fdd6df6-tmcd6"] Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 
00:11:02.171285 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7bdbd88689-kjxjr"] Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.173359 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7bdbd88689-kjxjr"] Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.188787 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmcqm\" (UniqueName: \"kubernetes.io/projected/af16157a-9cb1-4df3-a0c1-66839e592859-kube-api-access-dmcqm\") pod \"community-operators-vwvkb\" (UID: \"af16157a-9cb1-4df3-a0c1-66839e592859\") " pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.188835 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af16157a-9cb1-4df3-a0c1-66839e592859-utilities\") pod \"community-operators-vwvkb\" (UID: \"af16157a-9cb1-4df3-a0c1-66839e592859\") " pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.188869 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af16157a-9cb1-4df3-a0c1-66839e592859-catalog-content\") pod \"community-operators-vwvkb\" (UID: \"af16157a-9cb1-4df3-a0c1-66839e592859\") " pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.188940 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.188950 4766 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.188960 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb7km\" (UniqueName: \"kubernetes.io/projected/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-kube-api-access-rb7km\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.188968 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.188977 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/62c6bad5-b417-4f5c-aa08-737a3b1e8b57-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.290308 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmcqm\" (UniqueName: \"kubernetes.io/projected/af16157a-9cb1-4df3-a0c1-66839e592859-kube-api-access-dmcqm\") pod \"community-operators-vwvkb\" (UID: \"af16157a-9cb1-4df3-a0c1-66839e592859\") " pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.290385 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/af16157a-9cb1-4df3-a0c1-66839e592859-utilities\") pod \"community-operators-vwvkb\" (UID: \"af16157a-9cb1-4df3-a0c1-66839e592859\") " pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.290406 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af16157a-9cb1-4df3-a0c1-66839e592859-catalog-content\") pod \"community-operators-vwvkb\" (UID: \"af16157a-9cb1-4df3-a0c1-66839e592859\") " pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.290936 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af16157a-9cb1-4df3-a0c1-66839e592859-utilities\") pod \"community-operators-vwvkb\" (UID: \"af16157a-9cb1-4df3-a0c1-66839e592859\") " pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.291054 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af16157a-9cb1-4df3-a0c1-66839e592859-catalog-content\") pod \"community-operators-vwvkb\" (UID: \"af16157a-9cb1-4df3-a0c1-66839e592859\") " pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.310152 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmcqm\" (UniqueName: \"kubernetes.io/projected/af16157a-9cb1-4df3-a0c1-66839e592859-kube-api-access-dmcqm\") pod \"community-operators-vwvkb\" (UID: \"af16157a-9cb1-4df3-a0c1-66839e592859\") " pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.452929 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.691052 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4"] Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.692131 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.692796 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj"] Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.696245 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.698044 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.698326 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.698530 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.698758 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.698905 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.699142 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.701922 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.702038 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.702362 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.702676 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.702766 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.703719 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.709377 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4"] Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.715035 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.717027 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj"] Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.797502 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mppw9\" (UniqueName: \"kubernetes.io/projected/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-kube-api-access-mppw9\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.797657 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-config\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.797707 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3dc34986-2ecf-4078-b5d3-314e30838057-serving-cert\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.797737 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-client-ca\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.797766 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-proxy-ca-bundles\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.797879 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-client-ca\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.797922 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-serving-cert\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.797964 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-config\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.798000 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtvzz\" (UniqueName: \"kubernetes.io/projected/3dc34986-2ecf-4078-b5d3-314e30838057-kube-api-access-gtvzz\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.848434 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vwvkb"] Jan 02 
00:11:02 crc kubenswrapper[4766]: W0102 00:11:02.851400 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf16157a_9cb1_4df3_a0c1_66839e592859.slice/crio-d024776eea819043066fe52c8d9d899e5a61aaf0970ff180990dbc04492e3631 WatchSource:0}: Error finding container d024776eea819043066fe52c8d9d899e5a61aaf0970ff180990dbc04492e3631: Status 404 returned error can't find the container with id d024776eea819043066fe52c8d9d899e5a61aaf0970ff180990dbc04492e3631 Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.899349 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-config\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.899402 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtvzz\" (UniqueName: \"kubernetes.io/projected/3dc34986-2ecf-4078-b5d3-314e30838057-kube-api-access-gtvzz\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.899425 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mppw9\" (UniqueName: \"kubernetes.io/projected/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-kube-api-access-mppw9\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.899465 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-config\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.899482 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3dc34986-2ecf-4078-b5d3-314e30838057-serving-cert\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.899499 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-client-ca\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.899532 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-proxy-ca-bundles\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc 
kubenswrapper[4766]: I0102 00:11:02.899578 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-client-ca\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.899604 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-serving-cert\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.900644 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-config\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.900961 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-client-ca\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.901484 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-client-ca\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.901569 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-proxy-ca-bundles\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.905600 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-serving-cert\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.905833 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3dc34986-2ecf-4078-b5d3-314e30838057-serving-cert\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.907694 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-config\") pod 
\"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.917640 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mppw9\" (UniqueName: \"kubernetes.io/projected/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-kube-api-access-mppw9\") pod \"route-controller-manager-58d4986c5-g6pg4\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:02 crc kubenswrapper[4766]: I0102 00:11:02.922604 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtvzz\" (UniqueName: \"kubernetes.io/projected/3dc34986-2ecf-4078-b5d3-314e30838057-kube-api-access-gtvzz\") pod \"controller-manager-6584d6d7d8-lbkjj\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.016865 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.039222 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.114870 4766 generic.go:334] "Generic (PLEG): container finished" podID="af16157a-9cb1-4df3-a0c1-66839e592859" containerID="04959a287f905859aa06ab99a19d512b9ddd76e6ae8b51eaf900dcff5cf55f9b" exitCode=0 Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.114937 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwvkb" event={"ID":"af16157a-9cb1-4df3-a0c1-66839e592859","Type":"ContainerDied","Data":"04959a287f905859aa06ab99a19d512b9ddd76e6ae8b51eaf900dcff5cf55f9b"} Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.115240 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwvkb" event={"ID":"af16157a-9cb1-4df3-a0c1-66839e592859","Type":"ContainerStarted","Data":"d024776eea819043066fe52c8d9d899e5a61aaf0970ff180990dbc04492e3631"} Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.134805 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-djhdl"] Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.135821 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.138246 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.146421 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tjl2" event={"ID":"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca","Type":"ContainerStarted","Data":"3f72a0061c923ef916cfaf845e24d9dcc746b770501e57a29d5b9e7225f5f5e4"} Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.162230 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-djhdl"] Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.202458 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30d047be-f175-4f90-b77a-34a24fea709d-catalog-content\") pod \"certified-operators-djhdl\" (UID: \"30d047be-f175-4f90-b77a-34a24fea709d\") " pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.202776 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9h66\" (UniqueName: \"kubernetes.io/projected/30d047be-f175-4f90-b77a-34a24fea709d-kube-api-access-n9h66\") pod \"certified-operators-djhdl\" (UID: \"30d047be-f175-4f90-b77a-34a24fea709d\") " pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.202884 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30d047be-f175-4f90-b77a-34a24fea709d-utilities\") pod \"certified-operators-djhdl\" (UID: \"30d047be-f175-4f90-b77a-34a24fea709d\") " pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.304642 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30d047be-f175-4f90-b77a-34a24fea709d-utilities\") pod \"certified-operators-djhdl\" (UID: \"30d047be-f175-4f90-b77a-34a24fea709d\") " pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.304782 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30d047be-f175-4f90-b77a-34a24fea709d-catalog-content\") pod \"certified-operators-djhdl\" (UID: \"30d047be-f175-4f90-b77a-34a24fea709d\") " pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.304832 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9h66\" (UniqueName: \"kubernetes.io/projected/30d047be-f175-4f90-b77a-34a24fea709d-kube-api-access-n9h66\") pod \"certified-operators-djhdl\" (UID: \"30d047be-f175-4f90-b77a-34a24fea709d\") " pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.306098 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30d047be-f175-4f90-b77a-34a24fea709d-utilities\") pod \"certified-operators-djhdl\" (UID: \"30d047be-f175-4f90-b77a-34a24fea709d\") " 
pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.306595 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30d047be-f175-4f90-b77a-34a24fea709d-catalog-content\") pod \"certified-operators-djhdl\" (UID: \"30d047be-f175-4f90-b77a-34a24fea709d\") " pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.325165 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9h66\" (UniqueName: \"kubernetes.io/projected/30d047be-f175-4f90-b77a-34a24fea709d-kube-api-access-n9h66\") pod \"certified-operators-djhdl\" (UID: \"30d047be-f175-4f90-b77a-34a24fea709d\") " pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.458724 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.490183 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4"] Jan 02 00:11:03 crc kubenswrapper[4766]: W0102 00:11:03.493702 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode60ebdb8_c5e0_4f5e_96ce_59f6df2d0652.slice/crio-abdf08ae5c27a918e36f4b03b9af73b5be878a7d5427ad14ecd3af50b4193c6b WatchSource:0}: Error finding container abdf08ae5c27a918e36f4b03b9af73b5be878a7d5427ad14ecd3af50b4193c6b: Status 404 returned error can't find the container with id abdf08ae5c27a918e36f4b03b9af73b5be878a7d5427ad14ecd3af50b4193c6b Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.526431 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj"] Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.540554 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62c6bad5-b417-4f5c-aa08-737a3b1e8b57" path="/var/lib/kubelet/pods/62c6bad5-b417-4f5c-aa08-737a3b1e8b57/volumes" Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.541499 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba296a10-72d7-4719-9fcf-31bc93249ee2" path="/var/lib/kubelet/pods/ba296a10-72d7-4719-9fcf-31bc93249ee2/volumes" Jan 02 00:11:03 crc kubenswrapper[4766]: W0102 00:11:03.557781 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dc34986_2ecf_4078_b5d3_314e30838057.slice/crio-55fd7a28ff227b730df672991da73bd10ae19fb309c924ccceb34a2b7e45283c WatchSource:0}: Error finding container 55fd7a28ff227b730df672991da73bd10ae19fb309c924ccceb34a2b7e45283c: Status 404 returned error can't find the container with id 55fd7a28ff227b730df672991da73bd10ae19fb309c924ccceb34a2b7e45283c Jan 02 00:11:03 crc kubenswrapper[4766]: I0102 00:11:03.975119 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-djhdl"] Jan 02 00:11:03 crc kubenswrapper[4766]: W0102 00:11:03.985046 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30d047be_f175_4f90_b77a_34a24fea709d.slice/crio-854d93f472164ce17cdb718a2e40f8e8e71a7663832b94d085db2aa088c5855a WatchSource:0}: Error finding container 
854d93f472164ce17cdb718a2e40f8e8e71a7663832b94d085db2aa088c5855a: Status 404 returned error can't find the container with id 854d93f472164ce17cdb718a2e40f8e8e71a7663832b94d085db2aa088c5855a Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.152928 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" event={"ID":"3dc34986-2ecf-4078-b5d3-314e30838057","Type":"ContainerStarted","Data":"2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8"} Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.152980 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" event={"ID":"3dc34986-2ecf-4078-b5d3-314e30838057","Type":"ContainerStarted","Data":"55fd7a28ff227b730df672991da73bd10ae19fb309c924ccceb34a2b7e45283c"} Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.153186 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.154874 4766 generic.go:334] "Generic (PLEG): container finished" podID="0d021b9f-72be-49d6-9fc8-5cc94e6e33ca" containerID="3f72a0061c923ef916cfaf845e24d9dcc746b770501e57a29d5b9e7225f5f5e4" exitCode=0 Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.154928 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tjl2" event={"ID":"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca","Type":"ContainerDied","Data":"3f72a0061c923ef916cfaf845e24d9dcc746b770501e57a29d5b9e7225f5f5e4"} Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.160801 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwvkb" event={"ID":"af16157a-9cb1-4df3-a0c1-66839e592859","Type":"ContainerStarted","Data":"6be9de2158a11f49ab4c3198277c3f5119c5176100eace9834ffd1f49f2e3c12"} Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.160879 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.162775 4766 generic.go:334] "Generic (PLEG): container finished" podID="30d047be-f175-4f90-b77a-34a24fea709d" containerID="f3e0ac8098e5ed1c08b37bf493b16d88a1ccef23a5c663ca08142d4ef21e0cc0" exitCode=0 Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.162881 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djhdl" event={"ID":"30d047be-f175-4f90-b77a-34a24fea709d","Type":"ContainerDied","Data":"f3e0ac8098e5ed1c08b37bf493b16d88a1ccef23a5c663ca08142d4ef21e0cc0"} Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.162921 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djhdl" event={"ID":"30d047be-f175-4f90-b77a-34a24fea709d","Type":"ContainerStarted","Data":"854d93f472164ce17cdb718a2e40f8e8e71a7663832b94d085db2aa088c5855a"} Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.165977 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" event={"ID":"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652","Type":"ContainerStarted","Data":"fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3"} Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.165996 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" event={"ID":"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652","Type":"ContainerStarted","Data":"abdf08ae5c27a918e36f4b03b9af73b5be878a7d5427ad14ecd3af50b4193c6b"} Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.166702 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.177983 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" podStartSLOduration=3.177968705 podStartE2EDuration="3.177968705s" podCreationTimestamp="2026-01-02 00:11:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:11:04.172015883 +0000 UTC m=+238.970503037" watchObservedRunningTime="2026-01-02 00:11:04.177968705 +0000 UTC m=+238.976455859" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.181053 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.283396 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" podStartSLOduration=3.283381101 podStartE2EDuration="3.283381101s" podCreationTimestamp="2026-01-02 00:11:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:11:04.28148473 +0000 UTC m=+239.079971884" watchObservedRunningTime="2026-01-02 00:11:04.283381101 +0000 UTC m=+239.081868255" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.530887 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-x4q7d"] Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.532796 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.535731 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.539447 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x4q7d"] Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.629190 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hxz9\" (UniqueName: \"kubernetes.io/projected/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-kube-api-access-5hxz9\") pod \"redhat-marketplace-x4q7d\" (UID: \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.629442 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-utilities\") pod \"redhat-marketplace-x4q7d\" (UID: \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.629617 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-catalog-content\") pod \"redhat-marketplace-x4q7d\" (UID: \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.730690 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hxz9\" (UniqueName: \"kubernetes.io/projected/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-kube-api-access-5hxz9\") pod \"redhat-marketplace-x4q7d\" (UID: \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.730749 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-utilities\") pod \"redhat-marketplace-x4q7d\" (UID: \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.730801 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-catalog-content\") pod \"redhat-marketplace-x4q7d\" (UID: \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.732024 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-utilities\") pod \"redhat-marketplace-x4q7d\" (UID: \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.732037 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-catalog-content\") pod \"redhat-marketplace-x4q7d\" (UID: 
\"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.755912 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hxz9\" (UniqueName: \"kubernetes.io/projected/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-kube-api-access-5hxz9\") pod \"redhat-marketplace-x4q7d\" (UID: \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:04 crc kubenswrapper[4766]: I0102 00:11:04.861552 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:05 crc kubenswrapper[4766]: I0102 00:11:05.173401 4766 generic.go:334] "Generic (PLEG): container finished" podID="af16157a-9cb1-4df3-a0c1-66839e592859" containerID="6be9de2158a11f49ab4c3198277c3f5119c5176100eace9834ffd1f49f2e3c12" exitCode=0 Jan 02 00:11:05 crc kubenswrapper[4766]: I0102 00:11:05.173543 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwvkb" event={"ID":"af16157a-9cb1-4df3-a0c1-66839e592859","Type":"ContainerDied","Data":"6be9de2158a11f49ab4c3198277c3f5119c5176100eace9834ffd1f49f2e3c12"} Jan 02 00:11:05 crc kubenswrapper[4766]: I0102 00:11:05.180003 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tjl2" event={"ID":"0d021b9f-72be-49d6-9fc8-5cc94e6e33ca","Type":"ContainerStarted","Data":"d86989e35641ec54a6ecc9863cbfe6de8cdc57c12e63de9de10038a3470e66eb"} Jan 02 00:11:05 crc kubenswrapper[4766]: I0102 00:11:05.212221 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5tjl2" podStartSLOduration=2.499429096 podStartE2EDuration="5.212199751s" podCreationTimestamp="2026-01-02 00:11:00 +0000 UTC" firstStartedPulling="2026-01-02 00:11:02.072242298 +0000 UTC m=+236.870729452" lastFinishedPulling="2026-01-02 00:11:04.785012963 +0000 UTC m=+239.583500107" observedRunningTime="2026-01-02 00:11:05.206429963 +0000 UTC m=+240.004917137" watchObservedRunningTime="2026-01-02 00:11:05.212199751 +0000 UTC m=+240.010686905" Jan 02 00:11:05 crc kubenswrapper[4766]: I0102 00:11:05.349297 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x4q7d"] Jan 02 00:11:05 crc kubenswrapper[4766]: W0102 00:11:05.351051 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40c7eea3_f2b9_4769_a43f_d31eb3ff193f.slice/crio-a31f0dda2da569ead918cb59be21f9e9da4a7c835dedf8137e92528e35da4daf WatchSource:0}: Error finding container a31f0dda2da569ead918cb59be21f9e9da4a7c835dedf8137e92528e35da4daf: Status 404 returned error can't find the container with id a31f0dda2da569ead918cb59be21f9e9da4a7c835dedf8137e92528e35da4daf Jan 02 00:11:06 crc kubenswrapper[4766]: I0102 00:11:06.187401 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwvkb" event={"ID":"af16157a-9cb1-4df3-a0c1-66839e592859","Type":"ContainerStarted","Data":"f5fab105e8c75c88259f8e235dca93ff23226cbdf1abc7e34ab63eccb7bcadfd"} Jan 02 00:11:06 crc kubenswrapper[4766]: I0102 00:11:06.189884 4766 generic.go:334] "Generic (PLEG): container finished" podID="30d047be-f175-4f90-b77a-34a24fea709d" containerID="3b34dd73e4eb3f468ebb1719d90632e15b852b7e98b0b774ddceb2e2bd0121e2" exitCode=0 Jan 02 00:11:06 crc kubenswrapper[4766]: I0102 
00:11:06.189952 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djhdl" event={"ID":"30d047be-f175-4f90-b77a-34a24fea709d","Type":"ContainerDied","Data":"3b34dd73e4eb3f468ebb1719d90632e15b852b7e98b0b774ddceb2e2bd0121e2"} Jan 02 00:11:06 crc kubenswrapper[4766]: I0102 00:11:06.191252 4766 generic.go:334] "Generic (PLEG): container finished" podID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" containerID="bb65587edc630ca31e867f325ea6ab24d0a7a2765550712182ac9740fa8997bd" exitCode=0 Jan 02 00:11:06 crc kubenswrapper[4766]: I0102 00:11:06.191275 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x4q7d" event={"ID":"40c7eea3-f2b9-4769-a43f-d31eb3ff193f","Type":"ContainerDied","Data":"bb65587edc630ca31e867f325ea6ab24d0a7a2765550712182ac9740fa8997bd"} Jan 02 00:11:06 crc kubenswrapper[4766]: I0102 00:11:06.191302 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x4q7d" event={"ID":"40c7eea3-f2b9-4769-a43f-d31eb3ff193f","Type":"ContainerStarted","Data":"a31f0dda2da569ead918cb59be21f9e9da4a7c835dedf8137e92528e35da4daf"} Jan 02 00:11:06 crc kubenswrapper[4766]: I0102 00:11:06.206370 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vwvkb" podStartSLOduration=1.536046198 podStartE2EDuration="4.206349818s" podCreationTimestamp="2026-01-02 00:11:02 +0000 UTC" firstStartedPulling="2026-01-02 00:11:03.116885738 +0000 UTC m=+237.915372892" lastFinishedPulling="2026-01-02 00:11:05.787189358 +0000 UTC m=+240.585676512" observedRunningTime="2026-01-02 00:11:06.204134427 +0000 UTC m=+241.002621591" watchObservedRunningTime="2026-01-02 00:11:06.206349818 +0000 UTC m=+241.004836972" Jan 02 00:11:07 crc kubenswrapper[4766]: I0102 00:11:07.199715 4766 generic.go:334] "Generic (PLEG): container finished" podID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" containerID="de615bfe9f1efeeba1e93ac5cd0837637cbe25f947275a877a8829b78d21217d" exitCode=0 Jan 02 00:11:07 crc kubenswrapper[4766]: I0102 00:11:07.199766 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x4q7d" event={"ID":"40c7eea3-f2b9-4769-a43f-d31eb3ff193f","Type":"ContainerDied","Data":"de615bfe9f1efeeba1e93ac5cd0837637cbe25f947275a877a8829b78d21217d"} Jan 02 00:11:07 crc kubenswrapper[4766]: I0102 00:11:07.205513 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djhdl" event={"ID":"30d047be-f175-4f90-b77a-34a24fea709d","Type":"ContainerStarted","Data":"42f0c9b842338f326cd5841a29c657eae8c04c1948d008f08f0f75531ddc85ee"} Jan 02 00:11:07 crc kubenswrapper[4766]: I0102 00:11:07.246460 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-djhdl" podStartSLOduration=1.791759007 podStartE2EDuration="4.246441093s" podCreationTimestamp="2026-01-02 00:11:03 +0000 UTC" firstStartedPulling="2026-01-02 00:11:04.163992115 +0000 UTC m=+238.962479269" lastFinishedPulling="2026-01-02 00:11:06.618674201 +0000 UTC m=+241.417161355" observedRunningTime="2026-01-02 00:11:07.24557441 +0000 UTC m=+242.044061564" watchObservedRunningTime="2026-01-02 00:11:07.246441093 +0000 UTC m=+242.044928247" Jan 02 00:11:08 crc kubenswrapper[4766]: I0102 00:11:08.212798 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x4q7d" 
event={"ID":"40c7eea3-f2b9-4769-a43f-d31eb3ff193f","Type":"ContainerStarted","Data":"5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9"} Jan 02 00:11:08 crc kubenswrapper[4766]: I0102 00:11:08.234964 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-x4q7d" podStartSLOduration=2.781994483 podStartE2EDuration="4.234942546s" podCreationTimestamp="2026-01-02 00:11:04 +0000 UTC" firstStartedPulling="2026-01-02 00:11:06.192315146 +0000 UTC m=+240.990802300" lastFinishedPulling="2026-01-02 00:11:07.645263209 +0000 UTC m=+242.443750363" observedRunningTime="2026-01-02 00:11:08.230803303 +0000 UTC m=+243.029290457" watchObservedRunningTime="2026-01-02 00:11:08.234942546 +0000 UTC m=+243.033429700" Jan 02 00:11:09 crc kubenswrapper[4766]: I0102 00:11:09.768587 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-85btb" Jan 02 00:11:09 crc kubenswrapper[4766]: I0102 00:11:09.820141 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krqht"] Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.988468 4766 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.990359 4766 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.990465 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.991126 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee" gracePeriod=15 Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.991176 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa" gracePeriod=15 Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.991239 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087" gracePeriod=15 Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.991261 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd" gracePeriod=15 Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.991130 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" 
containerID="cri-o://9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e" gracePeriod=15 Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.992319 4766 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 02 00:11:10 crc kubenswrapper[4766]: E0102 00:11:10.992576 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.992626 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 02 00:11:10 crc kubenswrapper[4766]: E0102 00:11:10.992644 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.992655 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 02 00:11:10 crc kubenswrapper[4766]: E0102 00:11:10.992665 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.992671 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 02 00:11:10 crc kubenswrapper[4766]: E0102 00:11:10.992713 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.992724 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 02 00:11:10 crc kubenswrapper[4766]: E0102 00:11:10.992733 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.992741 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 02 00:11:10 crc kubenswrapper[4766]: E0102 00:11:10.992751 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.992758 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.992952 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.992970 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.992979 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.993062 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" 
Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.993072 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 02 00:11:10 crc kubenswrapper[4766]: E0102 00:11:10.993197 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.993222 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 02 00:11:10 crc kubenswrapper[4766]: I0102 00:11:10.993370 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.038166 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.048685 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.049679 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.086174 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.089556 4766 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.126744 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.126798 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.126881 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.126938 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:11 
crc kubenswrapper[4766]: I0102 00:11:11.126972 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.127035 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.127055 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.127075 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.227964 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228000 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228017 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228035 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228055 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228109 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228135 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228155 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228151 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228094 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228205 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228166 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228353 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228443 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:11 crc 
kubenswrapper[4766]: I0102 00:11:11.228509 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.228557 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.275120 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5tjl2" Jan 02 00:11:11 crc kubenswrapper[4766]: I0102 00:11:11.332583 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:11 crc kubenswrapper[4766]: E0102 00:11:11.974293 4766 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.200:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1886c182f3f6f3ca openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Created,Message:Created container startup-monitor,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-02 00:11:11.97245537 +0000 UTC m=+246.770942554,LastTimestamp:2026-01-02 00:11:11.97245537 +0000 UTC m=+246.770942554,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.235633 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"f7e2716442b76e0655a9600ffba742884cb635040f141129d51029fc81b2d7e5"} Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.235681 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"67549b7ac9b9d7a6310d464ebf86351d1d4b01ee69b12cb3e09ae928c98cec97"} Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.236244 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.238175 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.239473 4766 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.240149 4766 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e" exitCode=0 Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.240169 4766 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087" exitCode=0 Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.240180 4766 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa" exitCode=0 Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.240188 4766 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd" exitCode=2 Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.240250 4766 scope.go:117] "RemoveContainer" containerID="236a350c5d966064a740b7c8470a969608270f2551321718505d72dddf1c6498" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.242219 4766 generic.go:334] "Generic (PLEG): container finished" podID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" containerID="0b90c39abd328b9f254cea759281ecb100b3121d844c993720fa490e54601566" exitCode=0 Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.242698 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0","Type":"ContainerDied","Data":"0b90c39abd328b9f254cea759281ecb100b3121d844c993720fa490e54601566"} Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.243923 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.244113 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.453361 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.453436 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.497487 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.498005 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.498211 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:12 crc kubenswrapper[4766]: I0102 00:11:12.498407 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.253757 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.340101 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vwvkb" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.340881 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.341241 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.341488 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.459181 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.459225 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.503489 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.504183 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: 
connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.504705 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.505189 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.505426 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.612132 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.612704 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.613217 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.613706 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.614113 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.774748 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kube-api-access\") pod \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\" (UID: \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.774860 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kubelet-dir\") pod \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\" (UID: \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.774930 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-var-lock\") pod \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\" (UID: \"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0\") " Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.775053 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" (UID: "1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.775196 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-var-lock" (OuterVolumeSpecName: "var-lock") pod "1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" (UID: "1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.775312 4766 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.775335 4766 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-var-lock\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.816613 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" (UID: "1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:11:13 crc kubenswrapper[4766]: I0102 00:11:13.877500 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.264918 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0","Type":"ContainerDied","Data":"46fcb6f5aa9f6eed17d34c5bbe2ca817ab0c53327f86c81db75fe8df3113a436"} Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.264965 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.264993 4766 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46fcb6f5aa9f6eed17d34c5bbe2ca817ab0c53327f86c81db75fe8df3113a436" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.279420 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.279911 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.280396 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.280743 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.312146 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-djhdl" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.312787 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.313355 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.313704 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.314222 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.862533 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.862902 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.908338 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.908791 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.909140 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.909455 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.909765 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:14 crc kubenswrapper[4766]: I0102 00:11:14.910030 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:15 crc kubenswrapper[4766]: I0102 00:11:15.332518 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:11:15 crc kubenswrapper[4766]: I0102 00:11:15.333051 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:15 crc kubenswrapper[4766]: I0102 00:11:15.333372 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" 
pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:15 crc kubenswrapper[4766]: I0102 00:11:15.333826 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:15 crc kubenswrapper[4766]: I0102 00:11:15.334175 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:15 crc kubenswrapper[4766]: I0102 00:11:15.334467 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:15 crc kubenswrapper[4766]: I0102 00:11:15.539223 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:15 crc kubenswrapper[4766]: I0102 00:11:15.539925 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:15 crc kubenswrapper[4766]: I0102 00:11:15.540402 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:15 crc kubenswrapper[4766]: I0102 00:11:15.541081 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:15 crc kubenswrapper[4766]: I0102 00:11:15.541444 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:18 crc kubenswrapper[4766]: I0102 00:11:18.291160 4766 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 02 00:11:18 crc kubenswrapper[4766]: I0102 00:11:18.293455 4766 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee" exitCode=0 Jan 02 00:11:18 crc kubenswrapper[4766]: I0102 00:11:18.879799 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 02 00:11:18 crc kubenswrapper[4766]: I0102 00:11:18.880782 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:18 crc kubenswrapper[4766]: I0102 00:11:18.881475 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:18 crc kubenswrapper[4766]: I0102 00:11:18.881980 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:18 crc kubenswrapper[4766]: I0102 00:11:18.882458 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:18 crc kubenswrapper[4766]: I0102 00:11:18.882723 4766 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:18 crc kubenswrapper[4766]: I0102 00:11:18.883039 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:18 crc kubenswrapper[4766]: I0102 00:11:18.883357 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.038544 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 02 00:11:19 crc 
kubenswrapper[4766]: I0102 00:11:19.038606 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.038684 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.038714 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.038735 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.038825 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.039105 4766 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.039133 4766 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.039145 4766 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.305101 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.307011 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.307011 4766 scope.go:117] "RemoveContainer" containerID="9f42cbeb5b41c2f74db4883cfc60ed8f0517bb49504fd75027afa4e010e6605e" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.327878 4766 scope.go:117] "RemoveContainer" containerID="33732b89a857ae4e5235cea9972c9b26ea991107474a166d9dcfb79c9ebd9087" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.333262 4766 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.334535 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.335233 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.335645 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.336150 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.336513 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: E0102 00:11:19.347647 4766 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.348045 4766 scope.go:117] "RemoveContainer" containerID="6b9f62974b23031f908b4c6e6752f03ea57cfbc0a5f5fe44a9ffcbf7dd4e86aa" Jan 02 00:11:19 crc kubenswrapper[4766]: E0102 00:11:19.348776 4766 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: 
connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: E0102 00:11:19.349731 4766 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: E0102 00:11:19.350199 4766 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: E0102 00:11:19.350797 4766 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.350841 4766 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 02 00:11:19 crc kubenswrapper[4766]: E0102 00:11:19.351167 4766 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="200ms" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.366816 4766 scope.go:117] "RemoveContainer" containerID="503d44dadcb56a99d9fd63f6e09049a01ef2faf7ebffcd91aee661866da073cd" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.380995 4766 scope.go:117] "RemoveContainer" containerID="5d94f7bb52f8e8733758abcde65e67517b783dc516fc5cc7663d87dfef96a9ee" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.398635 4766 scope.go:117] "RemoveContainer" containerID="a8a77ba9c10ce0f4424487dd37e4daa48e129f318d79c6b5969a4c0dd3295205" Jan 02 00:11:19 crc kubenswrapper[4766]: E0102 00:11:19.426223 4766 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.200:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1886c182f3f6f3ca openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Created,Message:Created container startup-monitor,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-02 00:11:11.97245537 +0000 UTC m=+246.770942554,LastTimestamp:2026-01-02 00:11:11.97245537 +0000 UTC m=+246.770942554,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 02 00:11:19 crc kubenswrapper[4766]: I0102 00:11:19.539814 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 02 00:11:19 crc kubenswrapper[4766]: E0102 00:11:19.552171 4766 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 
38.102.83.200:6443: connect: connection refused" interval="400ms" Jan 02 00:11:19 crc kubenswrapper[4766]: E0102 00:11:19.953002 4766 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="800ms" Jan 02 00:11:20 crc kubenswrapper[4766]: E0102 00:11:20.754639 4766 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="1.6s" Jan 02 00:11:22 crc kubenswrapper[4766]: I0102 00:11:22.259359 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" podUID="35c9a208-651e-4453-975f-2d069276a9aa" containerName="oauth-openshift" containerID="cri-o://09beee16b1a97bc8b94cc4cf1b0cba1ee0667d6050711a617c1ab5244753cdb7" gracePeriod=15 Jan 02 00:11:22 crc kubenswrapper[4766]: E0102 00:11:22.356122 4766 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="3.2s" Jan 02 00:11:23 crc kubenswrapper[4766]: I0102 00:11:23.334700 4766 generic.go:334] "Generic (PLEG): container finished" podID="35c9a208-651e-4453-975f-2d069276a9aa" containerID="09beee16b1a97bc8b94cc4cf1b0cba1ee0667d6050711a617c1ab5244753cdb7" exitCode=0 Jan 02 00:11:23 crc kubenswrapper[4766]: I0102 00:11:23.334794 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" event={"ID":"35c9a208-651e-4453-975f-2d069276a9aa","Type":"ContainerDied","Data":"09beee16b1a97bc8b94cc4cf1b0cba1ee0667d6050711a617c1ab5244753cdb7"} Jan 02 00:11:23 crc kubenswrapper[4766]: I0102 00:11:23.893805 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:11:23 crc kubenswrapper[4766]: I0102 00:11:23.894477 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:23 crc kubenswrapper[4766]: I0102 00:11:23.894737 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:23 crc kubenswrapper[4766]: I0102 00:11:23.894943 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:23 crc kubenswrapper[4766]: I0102 00:11:23.895141 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:23 crc kubenswrapper[4766]: I0102 00:11:23.895423 4766 status_manager.go:851] "Failed to get status for pod" podUID="35c9a208-651e-4453-975f-2d069276a9aa" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-czkht\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:23 crc kubenswrapper[4766]: I0102 00:11:23.895768 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.010330 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-login\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.010443 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-service-ca\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.010507 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/35c9a208-651e-4453-975f-2d069276a9aa-audit-dir\") pod 
\"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.010618 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-session\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.010609 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/35c9a208-651e-4453-975f-2d069276a9aa-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.011314 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.012175 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68jhz\" (UniqueName: \"kubernetes.io/projected/35c9a208-651e-4453-975f-2d069276a9aa-kube-api-access-68jhz\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.012240 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-cliconfig\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.012292 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-audit-policies\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.012358 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-router-certs\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.012551 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-provider-selection\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.012619 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-idp-0-file-data\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.012660 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.012827 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-error\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.012879 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.012920 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-ocp-branding-template\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.013070 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-trusted-ca-bundle\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.013131 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-serving-cert\") pod \"35c9a208-651e-4453-975f-2d069276a9aa\" (UID: \"35c9a208-651e-4453-975f-2d069276a9aa\") " Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.013811 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.013984 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.014035 4766 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/35c9a208-651e-4453-975f-2d069276a9aa-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.014065 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.014092 4766 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.017236 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.017887 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.017938 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35c9a208-651e-4453-975f-2d069276a9aa-kube-api-access-68jhz" (OuterVolumeSpecName: "kube-api-access-68jhz") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "kube-api-access-68jhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.018023 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.023049 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.025282 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.025752 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.026047 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.027516 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "35c9a208-651e-4453-975f-2d069276a9aa" (UID: "35c9a208-651e-4453-975f-2d069276a9aa"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.115751 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.115784 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68jhz\" (UniqueName: \"kubernetes.io/projected/35c9a208-651e-4453-975f-2d069276a9aa-kube-api-access-68jhz\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.115793 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.115805 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.115818 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.115827 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.115836 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.115866 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.115875 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.115886 4766 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/35c9a208-651e-4453-975f-2d069276a9aa-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.343763 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" event={"ID":"35c9a208-651e-4453-975f-2d069276a9aa","Type":"ContainerDied","Data":"05e362ea7804f619280f15d1664181659dfb6c5277a1755afccb59d9648c2b46"} Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.343928 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.343961 4766 scope.go:117] "RemoveContainer" containerID="09beee16b1a97bc8b94cc4cf1b0cba1ee0667d6050711a617c1ab5244753cdb7" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.345209 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.345772 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.346498 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.349059 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.350003 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.350520 4766 status_manager.go:851] "Failed to get status for pod" podUID="35c9a208-651e-4453-975f-2d069276a9aa" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-czkht\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.368195 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.369047 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.369512 4766 status_manager.go:851] 
"Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.370096 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.370501 4766 status_manager.go:851] "Failed to get status for pod" podUID="35c9a208-651e-4453-975f-2d069276a9aa" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-czkht\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.371149 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.533483 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.535547 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.540237 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.542902 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.543623 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.544490 4766 status_manager.go:851] "Failed to get status for pod" podUID="35c9a208-651e-4453-975f-2d069276a9aa" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-czkht\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.545053 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.556242 4766 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cad8c8c8-e3a5-4066-821f-eb884896d207" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.556303 4766 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cad8c8c8-e3a5-4066-821f-eb884896d207" Jan 02 00:11:24 crc kubenswrapper[4766]: E0102 00:11:24.556896 4766 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:24 crc kubenswrapper[4766]: I0102 00:11:24.557507 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.365100 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.365174 4766 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34" exitCode=1 Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.365253 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34"} Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.365822 4766 scope.go:117] "RemoveContainer" containerID="e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.366475 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.367075 4766 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.367435 4766 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" 
containerID="1a7c3774e1d0a437c421a6d57bf47537fb7aebd3486b32e6bfd7bb7e2454f4d7" exitCode=0 Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.367476 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"1a7c3774e1d0a437c421a6d57bf47537fb7aebd3486b32e6bfd7bb7e2454f4d7"} Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.367491 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.367509 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6a1b3babdf4118e09765eee249eebe6b3bb42e75de7d356fc4156afa766cfe71"} Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.367824 4766 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cad8c8c8-e3a5-4066-821f-eb884896d207" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.367905 4766 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cad8c8c8-e3a5-4066-821f-eb884896d207" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.367904 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.368321 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: E0102 00:11:25.368381 4766 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.368755 4766 status_manager.go:851] "Failed to get status for pod" podUID="35c9a208-651e-4453-975f-2d069276a9aa" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-czkht\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.369059 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.369382 
4766 status_manager.go:851] "Failed to get status for pod" podUID="35c9a208-651e-4453-975f-2d069276a9aa" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-czkht\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.369648 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.369933 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.370187 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.370576 4766 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.371022 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.371462 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.540353 4766 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.540641 4766 status_manager.go:851] "Failed to get status for pod" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.200:6443: connect: 
connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.540915 4766 status_manager.go:851] "Failed to get status for pod" podUID="af16157a-9cb1-4df3-a0c1-66839e592859" pod="openshift-marketplace/community-operators-vwvkb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-vwvkb\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.541489 4766 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.542374 4766 status_manager.go:851] "Failed to get status for pod" podUID="35c9a208-651e-4453-975f-2d069276a9aa" pod="openshift-authentication/oauth-openshift-558db77b4-czkht" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-czkht\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.542630 4766 status_manager.go:851] "Failed to get status for pod" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" pod="openshift-marketplace/redhat-marketplace-x4q7d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-x4q7d\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.542829 4766 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: I0102 00:11:25.543114 4766 status_manager.go:851] "Failed to get status for pod" podUID="30d047be-f175-4f90-b77a-34a24fea709d" pod="openshift-marketplace/certified-operators-djhdl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djhdl\": dial tcp 38.102.83.200:6443: connect: connection refused" Jan 02 00:11:25 crc kubenswrapper[4766]: E0102 00:11:25.557766 4766 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="6.4s" Jan 02 00:11:26 crc kubenswrapper[4766]: I0102 00:11:26.380727 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 02 00:11:26 crc kubenswrapper[4766]: I0102 00:11:26.381087 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"02e21b056fd9fd590bd70144553b79b8789e74e773801711506d3b1abedd3ab8"} Jan 02 00:11:26 crc kubenswrapper[4766]: I0102 00:11:26.384497 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e30a030ab109fa80bd11273ae8f2566755795d7dfffd86ea86493624b9b0c961"} Jan 02 00:11:26 crc kubenswrapper[4766]: I0102 00:11:26.384527 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"28a44802fb03e07097d789e806737f54939f9ae623f5c65f8995f2f1a5278f3f"} Jan 02 00:11:26 crc kubenswrapper[4766]: I0102 00:11:26.384540 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"70184d318e674e50122e563e99eaaad7562dad76db149cd08463f89df07b10af"} Jan 02 00:11:27 crc kubenswrapper[4766]: I0102 00:11:27.395680 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"35f247a9f0de4accdfe75323573e70eaa1fd52e6f83a5e34d4f389a9c9c5f02b"} Jan 02 00:11:27 crc kubenswrapper[4766]: I0102 00:11:27.396102 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d41d8662932bd7e76e54e44e97b0a6f0441886be51163558198ebdeea33bffff"} Jan 02 00:11:27 crc kubenswrapper[4766]: I0102 00:11:27.396135 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:27 crc kubenswrapper[4766]: I0102 00:11:27.395950 4766 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cad8c8c8-e3a5-4066-821f-eb884896d207" Jan 02 00:11:27 crc kubenswrapper[4766]: I0102 00:11:27.396167 4766 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cad8c8c8-e3a5-4066-821f-eb884896d207" Jan 02 00:11:28 crc kubenswrapper[4766]: I0102 00:11:28.150733 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:11:28 crc kubenswrapper[4766]: I0102 00:11:28.151967 4766 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 02 00:11:28 crc kubenswrapper[4766]: I0102 00:11:28.152021 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 02 00:11:29 crc kubenswrapper[4766]: I0102 00:11:29.558729 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:29 crc kubenswrapper[4766]: I0102 00:11:29.558770 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:29 crc kubenswrapper[4766]: I0102 00:11:29.563477 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 
00:11:32 crc kubenswrapper[4766]: I0102 00:11:32.160321 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:11:32 crc kubenswrapper[4766]: I0102 00:11:32.639388 4766 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:32 crc kubenswrapper[4766]: I0102 00:11:32.727447 4766 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c758ed80-f8f6-4839-b6b5-b4fa53ce8be3" Jan 02 00:11:33 crc kubenswrapper[4766]: I0102 00:11:33.432469 4766 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cad8c8c8-e3a5-4066-821f-eb884896d207" Jan 02 00:11:33 crc kubenswrapper[4766]: I0102 00:11:33.433622 4766 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cad8c8c8-e3a5-4066-821f-eb884896d207" Jan 02 00:11:33 crc kubenswrapper[4766]: I0102 00:11:33.435010 4766 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c758ed80-f8f6-4839-b6b5-b4fa53ce8be3" Jan 02 00:11:33 crc kubenswrapper[4766]: I0102 00:11:33.436236 4766 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://70184d318e674e50122e563e99eaaad7562dad76db149cd08463f89df07b10af" Jan 02 00:11:33 crc kubenswrapper[4766]: I0102 00:11:33.436256 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.440207 4766 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cad8c8c8-e3a5-4066-821f-eb884896d207" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.440320 4766 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cad8c8c8-e3a5-4066-821f-eb884896d207" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.443573 4766 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c758ed80-f8f6-4839-b6b5-b4fa53ce8be3" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.575831 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.576387 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.576494 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.576553 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.578581 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.578581 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.579293 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.588193 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.592671 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.595901 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.600539 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.601835 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.657069 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.685917 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.695293 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 02 00:11:34 crc kubenswrapper[4766]: I0102 00:11:34.865042 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" podUID="bbc14cde-7795-4cb5-a9e0-0c2500835f44" containerName="registry" containerID="cri-o://f1fbf4730b1ec3252c2624a174674dec13b28efec7d58fc775108f67882b56cc" gracePeriod=30 Jan 02 00:11:35 crc kubenswrapper[4766]: W0102 00:11:35.170647 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-b65e2e3feae706c0896fd26a730146f580fb90f87eb74860e625cdb49105f83c WatchSource:0}: Error finding container b65e2e3feae706c0896fd26a730146f580fb90f87eb74860e625cdb49105f83c: Status 404 returned error can't find the container with id b65e2e3feae706c0896fd26a730146f580fb90f87eb74860e625cdb49105f83c Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.178242 4766 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-krqht container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.34:5000/healthz\": dial tcp 10.217.0.34:5000: connect: connection refused" start-of-body= Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.178302 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" podUID="bbc14cde-7795-4cb5-a9e0-0c2500835f44" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.34:5000/healthz\": dial tcp 10.217.0.34:5000: connect: connection refused" Jan 02 00:11:35 crc kubenswrapper[4766]: W0102 00:11:35.246932 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-833135f6a469eb3238d771218d82638d649f26d4883407926a1e2e15a2acb0d3 WatchSource:0}: Error finding container 833135f6a469eb3238d771218d82638d649f26d4883407926a1e2e15a2acb0d3: Status 404 returned error can't find the container with id 833135f6a469eb3238d771218d82638d649f26d4883407926a1e2e15a2acb0d3 Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.447245 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"261e3c99a0ae692e6453a5c04f6d817cdefb527b2fe93327ba7dec8ac97d4664"} Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.447294 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b65e2e3feae706c0896fd26a730146f580fb90f87eb74860e625cdb49105f83c"} Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.450625 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"17c3f80e41e97485c995d259c23a75f00ff061b66fd7f56748479c7a27c7e220"} Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.450660 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"833135f6a469eb3238d771218d82638d649f26d4883407926a1e2e15a2acb0d3"} Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.453289 4766 generic.go:334] "Generic (PLEG): container finished" podID="bbc14cde-7795-4cb5-a9e0-0c2500835f44" containerID="f1fbf4730b1ec3252c2624a174674dec13b28efec7d58fc775108f67882b56cc" exitCode=0 Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.453379 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" event={"ID":"bbc14cde-7795-4cb5-a9e0-0c2500835f44","Type":"ContainerDied","Data":"f1fbf4730b1ec3252c2624a174674dec13b28efec7d58fc775108f67882b56cc"} Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.456090 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"b35e795aeb8cb7f11c0df8a222dc0b08e9d6ad2b198e5272675b6ce25d7fd4d5"} Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.456123 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e8cc2a06a230f6d2d059efd1ad2b29472aba8ea838b10ff9c1698d189c8208b2"} Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.846435 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.894916 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bbc14cde-7795-4cb5-a9e0-0c2500835f44-ca-trust-extracted\") pod \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.895141 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.895211 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-certificates\") pod \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.895285 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-tls\") pod \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.896240 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "bbc14cde-7795-4cb5-a9e0-0c2500835f44" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.896453 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-trusted-ca\") pod \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.896547 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppqxm\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-kube-api-access-ppqxm\") pod \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.896897 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bbc14cde-7795-4cb5-a9e0-0c2500835f44" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.897342 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-bound-sa-token\") pod \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.897425 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bbc14cde-7795-4cb5-a9e0-0c2500835f44-installation-pull-secrets\") pod \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\" (UID: \"bbc14cde-7795-4cb5-a9e0-0c2500835f44\") " Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.897832 4766 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.897885 4766 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bbc14cde-7795-4cb5-a9e0-0c2500835f44-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.899346 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "bbc14cde-7795-4cb5-a9e0-0c2500835f44" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.900186 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-kube-api-access-ppqxm" (OuterVolumeSpecName: "kube-api-access-ppqxm") pod "bbc14cde-7795-4cb5-a9e0-0c2500835f44" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44"). InnerVolumeSpecName "kube-api-access-ppqxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.900613 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbc14cde-7795-4cb5-a9e0-0c2500835f44-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "bbc14cde-7795-4cb5-a9e0-0c2500835f44" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.903384 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bbc14cde-7795-4cb5-a9e0-0c2500835f44" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.908713 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "bbc14cde-7795-4cb5-a9e0-0c2500835f44" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44"). 
InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.919501 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbc14cde-7795-4cb5-a9e0-0c2500835f44-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "bbc14cde-7795-4cb5-a9e0-0c2500835f44" (UID: "bbc14cde-7795-4cb5-a9e0-0c2500835f44"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.998904 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppqxm\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-kube-api-access-ppqxm\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.998941 4766 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.998954 4766 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bbc14cde-7795-4cb5-a9e0-0c2500835f44-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.998966 4766 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bbc14cde-7795-4cb5-a9e0-0c2500835f44-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:35 crc kubenswrapper[4766]: I0102 00:11:35.998977 4766 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bbc14cde-7795-4cb5-a9e0-0c2500835f44-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:36 crc kubenswrapper[4766]: I0102 00:11:36.464334 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" event={"ID":"bbc14cde-7795-4cb5-a9e0-0c2500835f44","Type":"ContainerDied","Data":"68dd47fea876ee32606375dc69a3ed2aeeb2eb4e94afea126ea2fe16c7940a59"} Jan 02 00:11:36 crc kubenswrapper[4766]: I0102 00:11:36.464385 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krqht" Jan 02 00:11:36 crc kubenswrapper[4766]: I0102 00:11:36.464418 4766 scope.go:117] "RemoveContainer" containerID="f1fbf4730b1ec3252c2624a174674dec13b28efec7d58fc775108f67882b56cc" Jan 02 00:11:36 crc kubenswrapper[4766]: I0102 00:11:36.465790 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:11:37 crc kubenswrapper[4766]: I0102 00:11:37.471075 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/0.log" Jan 02 00:11:37 crc kubenswrapper[4766]: I0102 00:11:37.471402 4766 generic.go:334] "Generic (PLEG): container finished" podID="9d751cbb-f2e2-430d-9754-c882a5e924a5" containerID="261e3c99a0ae692e6453a5c04f6d817cdefb527b2fe93327ba7dec8ac97d4664" exitCode=255 Jan 02 00:11:37 crc kubenswrapper[4766]: I0102 00:11:37.471479 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerDied","Data":"261e3c99a0ae692e6453a5c04f6d817cdefb527b2fe93327ba7dec8ac97d4664"} Jan 02 00:11:37 crc kubenswrapper[4766]: I0102 00:11:37.471914 4766 scope.go:117] "RemoveContainer" containerID="261e3c99a0ae692e6453a5c04f6d817cdefb527b2fe93327ba7dec8ac97d4664" Jan 02 00:11:38 crc kubenswrapper[4766]: I0102 00:11:38.140248 4766 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 02 00:11:38 crc kubenswrapper[4766]: I0102 00:11:38.140350 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 02 00:11:38 crc kubenswrapper[4766]: E0102 00:11:38.272013 4766 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-conmon-45f75556941a0014065ddf2cc638d4ebb3b9475ba9868400cca8bb98af7d03f0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-45f75556941a0014065ddf2cc638d4ebb3b9475ba9868400cca8bb98af7d03f0.scope\": RecentStats: unable to find data in memory cache]" Jan 02 00:11:38 crc kubenswrapper[4766]: I0102 00:11:38.480042 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Jan 02 00:11:38 crc kubenswrapper[4766]: I0102 00:11:38.480725 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/0.log" Jan 02 00:11:38 crc kubenswrapper[4766]: I0102 00:11:38.480766 4766 generic.go:334] "Generic (PLEG): container 
finished" podID="9d751cbb-f2e2-430d-9754-c882a5e924a5" containerID="45f75556941a0014065ddf2cc638d4ebb3b9475ba9868400cca8bb98af7d03f0" exitCode=255 Jan 02 00:11:38 crc kubenswrapper[4766]: I0102 00:11:38.480796 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerDied","Data":"45f75556941a0014065ddf2cc638d4ebb3b9475ba9868400cca8bb98af7d03f0"} Jan 02 00:11:38 crc kubenswrapper[4766]: I0102 00:11:38.480836 4766 scope.go:117] "RemoveContainer" containerID="261e3c99a0ae692e6453a5c04f6d817cdefb527b2fe93327ba7dec8ac97d4664" Jan 02 00:11:38 crc kubenswrapper[4766]: I0102 00:11:38.481413 4766 scope.go:117] "RemoveContainer" containerID="45f75556941a0014065ddf2cc638d4ebb3b9475ba9868400cca8bb98af7d03f0" Jan 02 00:11:38 crc kubenswrapper[4766]: E0102 00:11:38.481809 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=check-endpoints pod=network-check-source-55646444c4-trplf_openshift-network-diagnostics(9d751cbb-f2e2-430d-9754-c882a5e924a5)\"" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:11:39 crc kubenswrapper[4766]: I0102 00:11:39.487422 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Jan 02 00:11:42 crc kubenswrapper[4766]: I0102 00:11:42.430181 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 02 00:11:42 crc kubenswrapper[4766]: I0102 00:11:42.985927 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 02 00:11:43 crc kubenswrapper[4766]: I0102 00:11:43.860756 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 02 00:11:43 crc kubenswrapper[4766]: I0102 00:11:43.918269 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 02 00:11:44 crc kubenswrapper[4766]: I0102 00:11:44.634020 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:44.826229 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:44.830155 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:44.845805 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:44.901327 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:44.942780 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:44.987592 4766 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.009151 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.012592 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.254913 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.295153 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.485622 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.578009 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.579976 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.588883 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.671399 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.685540 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.754439 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.769309 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 02 00:11:45 crc kubenswrapper[4766]: I0102 00:11:45.881967 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.030216 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.132613 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.266784 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.296588 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.317619 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.388604 
4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.527521 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.550555 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.559708 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.580919 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.643659 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.655507 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.886207 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.895035 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.899876 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 02 00:11:46 crc kubenswrapper[4766]: I0102 00:11:46.922076 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.013053 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.061528 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.079398 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.114025 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.146859 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.244185 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.487143 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.495076 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 
00:11:47.630681 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.694756 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.770882 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.884706 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.887139 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.894472 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 02 00:11:47 crc kubenswrapper[4766]: I0102 00:11:47.996589 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.139192 4766 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.139261 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.139321 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.139913 4766 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"02e21b056fd9fd590bd70144553b79b8789e74e773801711506d3b1abedd3ab8"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.140040 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://02e21b056fd9fd590bd70144553b79b8789e74e773801711506d3b1abedd3ab8" gracePeriod=30 Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.217093 4766 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.231780 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.241762 4766 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-api"/"kube-root-ca.crt" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.242790 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.270255 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.306030 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.390656 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.398996 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.430292 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.449900 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.489607 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.516474 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.549993 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.553180 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.621872 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.636395 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.657182 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 02 00:11:48 crc kubenswrapper[4766]: I0102 00:11:48.724809 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.104234 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.184289 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.232371 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.280886 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 02 00:11:49 crc 
kubenswrapper[4766]: I0102 00:11:49.424167 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.428690 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.431208 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.432443 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.479436 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.592901 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.696492 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.703506 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.712269 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.732987 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.785252 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.814920 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 02 00:11:49 crc kubenswrapper[4766]: I0102 00:11:49.920400 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.214463 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.251270 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.279274 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.401727 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.415749 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.468186 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 02 00:11:50 
crc kubenswrapper[4766]: I0102 00:11:50.473339 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.493372 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.502131 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.502803 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.571245 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.573185 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.633625 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.640357 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.695215 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.752040 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.901738 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.927037 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.962226 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 02 00:11:50 crc kubenswrapper[4766]: I0102 00:11:50.966365 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.132068 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.168076 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.307408 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.385930 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.433377 4766 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.534018 4766 scope.go:117] "RemoveContainer" containerID="45f75556941a0014065ddf2cc638d4ebb3b9475ba9868400cca8bb98af7d03f0" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.549949 4766 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.554245 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=40.554226647 podStartE2EDuration="40.554226647s" podCreationTimestamp="2026-01-02 00:11:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:11:32.83644906 +0000 UTC m=+267.634936214" watchObservedRunningTime="2026-01-02 00:11:51.554226647 +0000 UTC m=+286.352713811" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.556103 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-czkht","openshift-kube-apiserver/kube-apiserver-crc","openshift-image-registry/image-registry-697d97f7c8-krqht"] Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.556168 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.564003 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.577648 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.580821 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=19.580798021 podStartE2EDuration="19.580798021s" podCreationTimestamp="2026-01-02 00:11:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:11:51.578409376 +0000 UTC m=+286.376896530" watchObservedRunningTime="2026-01-02 00:11:51.580798021 +0000 UTC m=+286.379285175" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.601498 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.648798 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.784102 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.883085 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 02 00:11:51 crc kubenswrapper[4766]: I0102 00:11:51.987227 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.040447 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 02 00:11:52 crc 
kubenswrapper[4766]: I0102 00:11:52.102106 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.118943 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.126399 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.137723 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.139814 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.173174 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.182678 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.233051 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.271262 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.288525 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.289046 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.401230 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.417102 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.469966 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.483568 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.490354 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.563901 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.567243 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.567347 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b64df1d18979f16c29d23e0367dccef81d56c89ffa4b6ed897eb2f5de697fdbe"} Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.600621 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.626601 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.673457 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.709953 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.838614 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.908118 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.934406 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 02 00:11:52 crc kubenswrapper[4766]: I0102 00:11:52.935827 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.016246 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.038400 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.089947 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.090879 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.306778 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.362587 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.405132 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.468636 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.527417 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.541078 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35c9a208-651e-4453-975f-2d069276a9aa" 
path="/var/lib/kubelet/pods/35c9a208-651e-4453-975f-2d069276a9aa/volumes" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.542107 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbc14cde-7795-4cb5-a9e0-0c2500835f44" path="/var/lib/kubelet/pods/bbc14cde-7795-4cb5-a9e0-0c2500835f44/volumes" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.574288 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/2.log" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.574716 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.574865 4766 generic.go:334] "Generic (PLEG): container finished" podID="9d751cbb-f2e2-430d-9754-c882a5e924a5" containerID="b64df1d18979f16c29d23e0367dccef81d56c89ffa4b6ed897eb2f5de697fdbe" exitCode=255 Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.574977 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerDied","Data":"b64df1d18979f16c29d23e0367dccef81d56c89ffa4b6ed897eb2f5de697fdbe"} Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.575037 4766 scope.go:117] "RemoveContainer" containerID="45f75556941a0014065ddf2cc638d4ebb3b9475ba9868400cca8bb98af7d03f0" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.575684 4766 scope.go:117] "RemoveContainer" containerID="b64df1d18979f16c29d23e0367dccef81d56c89ffa4b6ed897eb2f5de697fdbe" Jan 02 00:11:53 crc kubenswrapper[4766]: E0102 00:11:53.576022 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=check-endpoints pod=network-check-source-55646444c4-trplf_openshift-network-diagnostics(9d751cbb-f2e2-430d-9754-c882a5e924a5)\"" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.645744 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.658921 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.927556 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 02 00:11:53 crc kubenswrapper[4766]: I0102 00:11:53.957195 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.011042 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.065357 4766 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.065576 4766 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://f7e2716442b76e0655a9600ffba742884cb635040f141129d51029fc81b2d7e5" gracePeriod=5 Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.090357 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.100996 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.161517 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.214503 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.236342 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.274378 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.297327 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.376558 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7794f9c69b-6fxbx"] Jan 02 00:11:54 crc kubenswrapper[4766]: E0102 00:11:54.377095 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.377169 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 02 00:11:54 crc kubenswrapper[4766]: E0102 00:11:54.377247 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35c9a208-651e-4453-975f-2d069276a9aa" containerName="oauth-openshift" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.377323 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="35c9a208-651e-4453-975f-2d069276a9aa" containerName="oauth-openshift" Jan 02 00:11:54 crc kubenswrapper[4766]: E0102 00:11:54.377408 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbc14cde-7795-4cb5-a9e0-0c2500835f44" containerName="registry" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.377472 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbc14cde-7795-4cb5-a9e0-0c2500835f44" containerName="registry" Jan 02 00:11:54 crc kubenswrapper[4766]: E0102 00:11:54.377538 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" containerName="installer" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.377602 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" containerName="installer" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.377746 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="35c9a208-651e-4453-975f-2d069276a9aa" containerName="oauth-openshift" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 
00:11:54.377824 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.377913 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbc14cde-7795-4cb5-a9e0-0c2500835f44" containerName="registry" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.378003 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d9dd2d4-fecf-4f58-b757-a35f8d5c4ff0" containerName="installer" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.378483 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.383015 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.383607 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.383990 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.384155 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.384380 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.384445 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.385170 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.385319 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.385884 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.386008 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.386427 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.386667 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.389650 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.394287 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.396071 4766 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.401065 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.404700 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.425680 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-audit-policies\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.425755 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.425977 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-router-certs\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.426268 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-service-ca\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.426364 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.426418 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.426444 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-idp-0-file-data\") pod 
\"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.426492 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.426547 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-audit-dir\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.426572 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-template-login\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.426642 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxq8m\" (UniqueName: \"kubernetes.io/projected/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-kube-api-access-cxq8m\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.426680 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.426710 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-template-error\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.426737 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-session\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.433705 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 02 
00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.437670 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.462361 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.520109 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528469 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-router-certs\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528539 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-service-ca\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528568 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528592 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528612 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528636 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528672 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-audit-dir\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: 
\"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528699 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-template-login\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528738 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxq8m\" (UniqueName: \"kubernetes.io/projected/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-kube-api-access-cxq8m\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528766 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528789 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-template-error\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528811 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-session\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528860 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-audit-policies\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.528890 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.529991 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-audit-dir\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " 
pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.530442 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-service-ca\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.530666 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-audit-policies\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.530997 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.531096 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.535246 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.535286 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.536048 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-template-login\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.538337 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-session\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 
crc kubenswrapper[4766]: I0102 00:11:54.546250 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.546648 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-router-certs\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.547986 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.551008 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxq8m\" (UniqueName: \"kubernetes.io/projected/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-kube-api-access-cxq8m\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.551827 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5b0122e-9bc8-4810-9ac2-e2352fe99edd-v4-0-config-user-template-error\") pod \"oauth-openshift-7794f9c69b-6fxbx\" (UID: \"f5b0122e-9bc8-4810-9ac2-e2352fe99edd\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.572298 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.582272 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/2.log" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.621184 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.656227 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.705198 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.717583 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.794059 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.810122 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.900212 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.973132 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 02 00:11:54 crc kubenswrapper[4766]: I0102 00:11:54.997238 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 02 00:11:55 crc kubenswrapper[4766]: I0102 00:11:55.003330 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 02 00:11:55 crc kubenswrapper[4766]: I0102 00:11:55.048303 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 02 00:11:55 crc kubenswrapper[4766]: I0102 00:11:55.093400 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 02 00:11:55 crc kubenswrapper[4766]: I0102 00:11:55.140725 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 02 00:11:55 crc kubenswrapper[4766]: I0102 00:11:55.179058 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 02 00:11:55 crc kubenswrapper[4766]: I0102 00:11:55.413191 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 02 00:11:55 crc kubenswrapper[4766]: I0102 00:11:55.557224 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 02 00:11:55 crc kubenswrapper[4766]: I0102 00:11:55.626414 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.025002 4766 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.071803 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.162629 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.284657 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 
00:11:56.417057 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.454418 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.491290 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.546108 4766 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.550171 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.600430 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.614653 4766 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.736033 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.736920 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.882710 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.961972 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 02 00:11:56 crc kubenswrapper[4766]: I0102 00:11:56.994742 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.005270 4766 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.019977 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.102165 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.106814 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.249184 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.294311 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.350617 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.356244 4766 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.388993 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.411668 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.611868 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.728147 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.735201 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.759508 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.837127 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 02 00:11:57 crc kubenswrapper[4766]: I0102 00:11:57.848182 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 02 00:11:58 crc kubenswrapper[4766]: I0102 00:11:58.176687 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 02 00:11:58 crc kubenswrapper[4766]: I0102 00:11:58.353269 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 02 00:11:58 crc kubenswrapper[4766]: I0102 00:11:58.378397 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 02 00:11:58 crc kubenswrapper[4766]: I0102 00:11:58.430626 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 02 00:11:58 crc kubenswrapper[4766]: I0102 00:11:58.629764 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 02 00:11:58 crc kubenswrapper[4766]: I0102 00:11:58.771955 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.125913 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.615111 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.615170 4766 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="f7e2716442b76e0655a9600ffba742884cb635040f141129d51029fc81b2d7e5" exitCode=137 Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.615214 4766 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="67549b7ac9b9d7a6310d464ebf86351d1d4b01ee69b12cb3e09ae928c98cec97" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.642029 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.642110 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.703322 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.703390 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.703484 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.703489 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.703574 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.703628 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.703655 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.704108 4766 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.704132 4766 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.704210 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.704322 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.710545 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.806030 4766 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.806073 4766 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 02 00:11:59 crc kubenswrapper[4766]: I0102 00:11:59.806083 4766 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:00 crc kubenswrapper[4766]: I0102 00:12:00.119369 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 02 00:12:00 crc kubenswrapper[4766]: I0102 00:12:00.174322 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7794f9c69b-6fxbx"] Jan 02 00:12:00 crc kubenswrapper[4766]: I0102 00:12:00.623607 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 02 00:12:00 crc kubenswrapper[4766]: I0102 00:12:00.623721 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7794f9c69b-6fxbx"] Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.142487 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.545264 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.545884 4766 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.558677 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.558722 4766 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="c768cde4-d9bb-4fca-9f4d-61953cbc75e6" Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.565602 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.565643 4766 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="c768cde4-d9bb-4fca-9f4d-61953cbc75e6" Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.632305 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" event={"ID":"f5b0122e-9bc8-4810-9ac2-e2352fe99edd","Type":"ContainerStarted","Data":"e6d3a3370e6da9c701ac9061eb81d20bb0defb84a7ff94157205ed0731905029"} Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.632354 4766 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" event={"ID":"f5b0122e-9bc8-4810-9ac2-e2352fe99edd","Type":"ContainerStarted","Data":"63a6a70e7b27822aeab82ac95018559fd1cc5ae8bcd639e29300e2815cded46f"} Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.632626 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.639062 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" Jan 02 00:12:01 crc kubenswrapper[4766]: I0102 00:12:01.659387 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7794f9c69b-6fxbx" podStartSLOduration=64.659359678 podStartE2EDuration="1m4.659359678s" podCreationTimestamp="2026-01-02 00:10:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:12:01.652708267 +0000 UTC m=+296.451195441" watchObservedRunningTime="2026-01-02 00:12:01.659359678 +0000 UTC m=+296.457846872" Jan 02 00:12:05 crc kubenswrapper[4766]: I0102 00:12:05.388994 4766 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 02 00:12:07 crc kubenswrapper[4766]: I0102 00:12:07.534464 4766 scope.go:117] "RemoveContainer" containerID="b64df1d18979f16c29d23e0367dccef81d56c89ffa4b6ed897eb2f5de697fdbe" Jan 02 00:12:07 crc kubenswrapper[4766]: E0102 00:12:07.534938 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=check-endpoints pod=network-check-source-55646444c4-trplf_openshift-network-diagnostics(9d751cbb-f2e2-430d-9754-c882a5e924a5)\"" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 02 00:12:14 crc kubenswrapper[4766]: I0102 00:12:14.691167 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 02 00:12:18 crc kubenswrapper[4766]: I0102 00:12:18.736805 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Jan 02 00:12:18 crc kubenswrapper[4766]: I0102 00:12:18.738714 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 02 00:12:18 crc kubenswrapper[4766]: I0102 00:12:18.738772 4766 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="02e21b056fd9fd590bd70144553b79b8789e74e773801711506d3b1abedd3ab8" exitCode=137 Jan 02 00:12:18 crc kubenswrapper[4766]: I0102 00:12:18.738823 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"02e21b056fd9fd590bd70144553b79b8789e74e773801711506d3b1abedd3ab8"} Jan 02 00:12:18 crc kubenswrapper[4766]: I0102 00:12:18.738948 4766 scope.go:117] "RemoveContainer" 
containerID="e559abcada11efef789e47cc805928e12dd03310679068cee565188cc45a9c34" Jan 02 00:12:19 crc kubenswrapper[4766]: I0102 00:12:19.538123 4766 scope.go:117] "RemoveContainer" containerID="b64df1d18979f16c29d23e0367dccef81d56c89ffa4b6ed897eb2f5de697fdbe" Jan 02 00:12:19 crc kubenswrapper[4766]: I0102 00:12:19.748975 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Jan 02 00:12:19 crc kubenswrapper[4766]: I0102 00:12:19.750417 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"54e7a1ff072a2f35aa8d95bbf1ba0f9c31647efa4423cc89cc481dba512486a8"} Jan 02 00:12:19 crc kubenswrapper[4766]: I0102 00:12:19.752590 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/2.log" Jan 02 00:12:19 crc kubenswrapper[4766]: I0102 00:12:19.752642 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"da54a319ea80e1f9da0dab8a04cecc22edd56cf1e5e1272560210ea2f1d5eb55"} Jan 02 00:12:22 crc kubenswrapper[4766]: I0102 00:12:22.160076 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:12:28 crc kubenswrapper[4766]: I0102 00:12:28.139580 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:12:28 crc kubenswrapper[4766]: I0102 00:12:28.148228 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:12:32 crc kubenswrapper[4766]: I0102 00:12:32.165336 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 02 00:12:36 crc kubenswrapper[4766]: I0102 00:12:36.840834 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj"] Jan 02 00:12:36 crc kubenswrapper[4766]: I0102 00:12:36.841633 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" podUID="3dc34986-2ecf-4078-b5d3-314e30838057" containerName="controller-manager" containerID="cri-o://2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8" gracePeriod=30 Jan 02 00:12:36 crc kubenswrapper[4766]: I0102 00:12:36.857474 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4"] Jan 02 00:12:36 crc kubenswrapper[4766]: I0102 00:12:36.857736 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" podUID="e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652" containerName="route-controller-manager" containerID="cri-o://fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3" gracePeriod=30 Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.381557 4766 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.385855 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.419194 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-client-ca\") pod \"3dc34986-2ecf-4078-b5d3-314e30838057\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.419301 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-client-ca\") pod \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.419337 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3dc34986-2ecf-4078-b5d3-314e30838057-serving-cert\") pod \"3dc34986-2ecf-4078-b5d3-314e30838057\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.419379 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-proxy-ca-bundles\") pod \"3dc34986-2ecf-4078-b5d3-314e30838057\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.419409 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtvzz\" (UniqueName: \"kubernetes.io/projected/3dc34986-2ecf-4078-b5d3-314e30838057-kube-api-access-gtvzz\") pod \"3dc34986-2ecf-4078-b5d3-314e30838057\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.419437 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mppw9\" (UniqueName: \"kubernetes.io/projected/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-kube-api-access-mppw9\") pod \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.419461 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-serving-cert\") pod \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.419503 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-config\") pod \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\" (UID: \"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652\") " Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.419559 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-config\") pod \"3dc34986-2ecf-4078-b5d3-314e30838057\" (UID: \"3dc34986-2ecf-4078-b5d3-314e30838057\") " Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 
00:12:37.420533 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3dc34986-2ecf-4078-b5d3-314e30838057" (UID: "3dc34986-2ecf-4078-b5d3-314e30838057"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.420620 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-client-ca" (OuterVolumeSpecName: "client-ca") pod "3dc34986-2ecf-4078-b5d3-314e30838057" (UID: "3dc34986-2ecf-4078-b5d3-314e30838057"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.420640 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-client-ca" (OuterVolumeSpecName: "client-ca") pod "e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652" (UID: "e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.420670 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-config" (OuterVolumeSpecName: "config") pod "e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652" (UID: "e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.421037 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-config" (OuterVolumeSpecName: "config") pod "3dc34986-2ecf-4078-b5d3-314e30838057" (UID: "3dc34986-2ecf-4078-b5d3-314e30838057"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.425558 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dc34986-2ecf-4078-b5d3-314e30838057-kube-api-access-gtvzz" (OuterVolumeSpecName: "kube-api-access-gtvzz") pod "3dc34986-2ecf-4078-b5d3-314e30838057" (UID: "3dc34986-2ecf-4078-b5d3-314e30838057"). InnerVolumeSpecName "kube-api-access-gtvzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.425697 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-kube-api-access-mppw9" (OuterVolumeSpecName: "kube-api-access-mppw9") pod "e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652" (UID: "e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652"). InnerVolumeSpecName "kube-api-access-mppw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.425992 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dc34986-2ecf-4078-b5d3-314e30838057-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3dc34986-2ecf-4078-b5d3-314e30838057" (UID: "3dc34986-2ecf-4078-b5d3-314e30838057"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.427062 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652" (UID: "e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.520793 4766 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.520830 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtvzz\" (UniqueName: \"kubernetes.io/projected/3dc34986-2ecf-4078-b5d3-314e30838057-kube-api-access-gtvzz\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.520855 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mppw9\" (UniqueName: \"kubernetes.io/projected/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-kube-api-access-mppw9\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.520865 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.520873 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.520882 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.520890 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3dc34986-2ecf-4078-b5d3-314e30838057-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.520898 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.520905 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3dc34986-2ecf-4078-b5d3-314e30838057-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.866331 4766 generic.go:334] "Generic (PLEG): container finished" podID="e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652" containerID="fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3" exitCode=0 Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.866398 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.866395 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" event={"ID":"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652","Type":"ContainerDied","Data":"fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3"} Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.866501 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4" event={"ID":"e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652","Type":"ContainerDied","Data":"abdf08ae5c27a918e36f4b03b9af73b5be878a7d5427ad14ecd3af50b4193c6b"} Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.866517 4766 scope.go:117] "RemoveContainer" containerID="fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.868621 4766 generic.go:334] "Generic (PLEG): container finished" podID="3dc34986-2ecf-4078-b5d3-314e30838057" containerID="2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8" exitCode=0 Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.868690 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" event={"ID":"3dc34986-2ecf-4078-b5d3-314e30838057","Type":"ContainerDied","Data":"2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8"} Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.868915 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" event={"ID":"3dc34986-2ecf-4078-b5d3-314e30838057","Type":"ContainerDied","Data":"55fd7a28ff227b730df672991da73bd10ae19fb309c924ccceb34a2b7e45283c"} Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.868791 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.884050 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4"] Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.884218 4766 scope.go:117] "RemoveContainer" containerID="fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3" Jan 02 00:12:37 crc kubenswrapper[4766]: E0102 00:12:37.885312 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3\": container with ID starting with fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3 not found: ID does not exist" containerID="fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.885342 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3"} err="failed to get container status \"fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3\": rpc error: code = NotFound desc = could not find container \"fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3\": container with ID starting with fafa1deeba6f113daaef41aaa7805995e8c27fec5d0e9f538a460d019a7a99a3 not found: ID does not exist" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.885390 4766 scope.go:117] "RemoveContainer" containerID="2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.889710 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d4986c5-g6pg4"] Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.899071 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj"] Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.903222 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6584d6d7d8-lbkjj"] Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.906468 4766 scope.go:117] "RemoveContainer" containerID="2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8" Jan 02 00:12:37 crc kubenswrapper[4766]: E0102 00:12:37.907144 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8\": container with ID starting with 2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8 not found: ID does not exist" containerID="2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8" Jan 02 00:12:37 crc kubenswrapper[4766]: I0102 00:12:37.907182 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8"} err="failed to get container status \"2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8\": rpc error: code = NotFound desc = could not find container \"2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8\": container with ID starting with 2246cc8d21557651088a3e91202a42e2c0363ddf7eb3ee3cad757031e43aa2a8 not found: ID does not exist" Jan 02 00:12:38 crc 
kubenswrapper[4766]: I0102 00:12:38.756766 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q"] Jan 02 00:12:38 crc kubenswrapper[4766]: E0102 00:12:38.757069 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652" containerName="route-controller-manager" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.757091 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652" containerName="route-controller-manager" Jan 02 00:12:38 crc kubenswrapper[4766]: E0102 00:12:38.757105 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc34986-2ecf-4078-b5d3-314e30838057" containerName="controller-manager" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.757123 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc34986-2ecf-4078-b5d3-314e30838057" containerName="controller-manager" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.757279 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652" containerName="route-controller-manager" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.757304 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dc34986-2ecf-4078-b5d3-314e30838057" containerName="controller-manager" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.757839 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.759709 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.759728 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.759775 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.759996 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.760154 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.760536 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-679d948996-s4xtw"] Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.761391 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.762826 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.763694 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.764025 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.764102 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.766197 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.766335 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.768343 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q"] Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.769182 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.770483 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-679d948996-s4xtw"] Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.771163 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.834676 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-config\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.834725 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-proxy-ca-bundles\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.834753 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zwfd\" (UniqueName: \"kubernetes.io/projected/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-kube-api-access-2zwfd\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.834895 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-client-ca\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.834937 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-serving-cert\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.835012 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-config\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.835079 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-client-ca\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.835108 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9l4h\" (UniqueName: \"kubernetes.io/projected/fb9c728e-f90d-4e49-973b-0f775670ac52-kube-api-access-v9l4h\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.835169 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb9c728e-f90d-4e49-973b-0f775670ac52-serving-cert\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.936553 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-proxy-ca-bundles\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.936613 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-config\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.936643 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zwfd\" (UniqueName: 
\"kubernetes.io/projected/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-kube-api-access-2zwfd\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.936697 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-client-ca\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.936728 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-serving-cert\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.936756 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-config\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.936785 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-client-ca\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.936809 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9l4h\" (UniqueName: \"kubernetes.io/projected/fb9c728e-f90d-4e49-973b-0f775670ac52-kube-api-access-v9l4h\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.936871 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb9c728e-f90d-4e49-973b-0f775670ac52-serving-cert\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.937832 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-proxy-ca-bundles\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.937911 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-client-ca\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: 
\"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.938089 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-config\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.939257 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-config\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.939393 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-client-ca\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.942679 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-serving-cert\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.945427 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb9c728e-f90d-4e49-973b-0f775670ac52-serving-cert\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.959817 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9l4h\" (UniqueName: \"kubernetes.io/projected/fb9c728e-f90d-4e49-973b-0f775670ac52-kube-api-access-v9l4h\") pod \"route-controller-manager-775fdc88b6-d7j7q\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:38 crc kubenswrapper[4766]: I0102 00:12:38.960284 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zwfd\" (UniqueName: \"kubernetes.io/projected/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-kube-api-access-2zwfd\") pod \"controller-manager-679d948996-s4xtw\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.077467 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.093669 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.279256 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q"] Jan 02 00:12:39 crc kubenswrapper[4766]: W0102 00:12:39.283628 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb9c728e_f90d_4e49_973b_0f775670ac52.slice/crio-675ca343fe176cf4802512e3a7da5abed49f552ab76c468f10770661a18e44d3 WatchSource:0}: Error finding container 675ca343fe176cf4802512e3a7da5abed49f552ab76c468f10770661a18e44d3: Status 404 returned error can't find the container with id 675ca343fe176cf4802512e3a7da5abed49f552ab76c468f10770661a18e44d3 Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.525683 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-679d948996-s4xtw"] Jan 02 00:12:39 crc kubenswrapper[4766]: W0102 00:12:39.533876 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea1f79d9_5dd4_43ef_9f44_7d9ff305dc8e.slice/crio-2d1e2401d70945204b42587055748c58b334f341200f5e4db9b9d4e2cf7d7993 WatchSource:0}: Error finding container 2d1e2401d70945204b42587055748c58b334f341200f5e4db9b9d4e2cf7d7993: Status 404 returned error can't find the container with id 2d1e2401d70945204b42587055748c58b334f341200f5e4db9b9d4e2cf7d7993 Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.539491 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dc34986-2ecf-4078-b5d3-314e30838057" path="/var/lib/kubelet/pods/3dc34986-2ecf-4078-b5d3-314e30838057/volumes" Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.540361 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652" path="/var/lib/kubelet/pods/e60ebdb8-c5e0-4f5e-96ce-59f6df2d0652/volumes" Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.883770 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" event={"ID":"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e","Type":"ContainerStarted","Data":"10f10ef4092f5e9d1cf8c922c73a9a2a89dc966998e0cc09a76f4efe57d1fc3f"} Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.883863 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" event={"ID":"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e","Type":"ContainerStarted","Data":"2d1e2401d70945204b42587055748c58b334f341200f5e4db9b9d4e2cf7d7993"} Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.884266 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.886596 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" event={"ID":"fb9c728e-f90d-4e49-973b-0f775670ac52","Type":"ContainerStarted","Data":"af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56"} Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.886655 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" 
event={"ID":"fb9c728e-f90d-4e49-973b-0f775670ac52","Type":"ContainerStarted","Data":"675ca343fe176cf4802512e3a7da5abed49f552ab76c468f10770661a18e44d3"} Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.887430 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.932394 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:12:39 crc kubenswrapper[4766]: I0102 00:12:39.943123 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" podStartSLOduration=3.943091969 podStartE2EDuration="3.943091969s" podCreationTimestamp="2026-01-02 00:12:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:12:39.940113789 +0000 UTC m=+334.738600963" watchObservedRunningTime="2026-01-02 00:12:39.943091969 +0000 UTC m=+334.741579123" Jan 02 00:12:40 crc kubenswrapper[4766]: I0102 00:12:40.030610 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" podStartSLOduration=4.030589192 podStartE2EDuration="4.030589192s" podCreationTimestamp="2026-01-02 00:12:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:12:40.029314897 +0000 UTC m=+334.827802051" watchObservedRunningTime="2026-01-02 00:12:40.030589192 +0000 UTC m=+334.829076346" Jan 02 00:12:40 crc kubenswrapper[4766]: I0102 00:12:40.071867 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:41 crc kubenswrapper[4766]: I0102 00:12:41.024269 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q"] Jan 02 00:12:42 crc kubenswrapper[4766]: I0102 00:12:42.904360 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" podUID="fb9c728e-f90d-4e49-973b-0f775670ac52" containerName="route-controller-manager" containerID="cri-o://af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56" gracePeriod=30 Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.299870 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.326579 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56"] Jan 02 00:12:43 crc kubenswrapper[4766]: E0102 00:12:43.326881 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb9c728e-f90d-4e49-973b-0f775670ac52" containerName="route-controller-manager" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.326899 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb9c728e-f90d-4e49-973b-0f775670ac52" containerName="route-controller-manager" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.327035 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb9c728e-f90d-4e49-973b-0f775670ac52" containerName="route-controller-manager" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.327550 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.341241 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56"] Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.398265 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-config\") pod \"fb9c728e-f90d-4e49-973b-0f775670ac52\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.398413 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb9c728e-f90d-4e49-973b-0f775670ac52-serving-cert\") pod \"fb9c728e-f90d-4e49-973b-0f775670ac52\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.398473 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9l4h\" (UniqueName: \"kubernetes.io/projected/fb9c728e-f90d-4e49-973b-0f775670ac52-kube-api-access-v9l4h\") pod \"fb9c728e-f90d-4e49-973b-0f775670ac52\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.398523 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-client-ca\") pod \"fb9c728e-f90d-4e49-973b-0f775670ac52\" (UID: \"fb9c728e-f90d-4e49-973b-0f775670ac52\") " Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.398718 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8983098f-af49-4bc4-8083-38754f1e7f8b-config\") pod \"route-controller-manager-848fc9dbbc-jpw56\" (UID: \"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.398778 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbkst\" (UniqueName: \"kubernetes.io/projected/8983098f-af49-4bc4-8083-38754f1e7f8b-kube-api-access-kbkst\") pod \"route-controller-manager-848fc9dbbc-jpw56\" (UID: 
\"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.398805 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8983098f-af49-4bc4-8083-38754f1e7f8b-serving-cert\") pod \"route-controller-manager-848fc9dbbc-jpw56\" (UID: \"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.398827 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8983098f-af49-4bc4-8083-38754f1e7f8b-client-ca\") pod \"route-controller-manager-848fc9dbbc-jpw56\" (UID: \"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.399134 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-config" (OuterVolumeSpecName: "config") pod "fb9c728e-f90d-4e49-973b-0f775670ac52" (UID: "fb9c728e-f90d-4e49-973b-0f775670ac52"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.399297 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-client-ca" (OuterVolumeSpecName: "client-ca") pod "fb9c728e-f90d-4e49-973b-0f775670ac52" (UID: "fb9c728e-f90d-4e49-973b-0f775670ac52"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.408982 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9c728e-f90d-4e49-973b-0f775670ac52-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fb9c728e-f90d-4e49-973b-0f775670ac52" (UID: "fb9c728e-f90d-4e49-973b-0f775670ac52"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.409673 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb9c728e-f90d-4e49-973b-0f775670ac52-kube-api-access-v9l4h" (OuterVolumeSpecName: "kube-api-access-v9l4h") pod "fb9c728e-f90d-4e49-973b-0f775670ac52" (UID: "fb9c728e-f90d-4e49-973b-0f775670ac52"). InnerVolumeSpecName "kube-api-access-v9l4h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.500379 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8983098f-af49-4bc4-8083-38754f1e7f8b-config\") pod \"route-controller-manager-848fc9dbbc-jpw56\" (UID: \"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.500461 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbkst\" (UniqueName: \"kubernetes.io/projected/8983098f-af49-4bc4-8083-38754f1e7f8b-kube-api-access-kbkst\") pod \"route-controller-manager-848fc9dbbc-jpw56\" (UID: \"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.500497 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8983098f-af49-4bc4-8083-38754f1e7f8b-serving-cert\") pod \"route-controller-manager-848fc9dbbc-jpw56\" (UID: \"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.500522 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8983098f-af49-4bc4-8083-38754f1e7f8b-client-ca\") pod \"route-controller-manager-848fc9dbbc-jpw56\" (UID: \"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.500583 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.500598 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb9c728e-f90d-4e49-973b-0f775670ac52-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.500610 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb9c728e-f90d-4e49-973b-0f775670ac52-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.500624 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9l4h\" (UniqueName: \"kubernetes.io/projected/fb9c728e-f90d-4e49-973b-0f775670ac52-kube-api-access-v9l4h\") on node \"crc\" DevicePath \"\"" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.501659 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8983098f-af49-4bc4-8083-38754f1e7f8b-client-ca\") pod \"route-controller-manager-848fc9dbbc-jpw56\" (UID: \"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.501864 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8983098f-af49-4bc4-8083-38754f1e7f8b-config\") pod \"route-controller-manager-848fc9dbbc-jpw56\" 
(UID: \"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.508646 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8983098f-af49-4bc4-8083-38754f1e7f8b-serving-cert\") pod \"route-controller-manager-848fc9dbbc-jpw56\" (UID: \"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.521470 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbkst\" (UniqueName: \"kubernetes.io/projected/8983098f-af49-4bc4-8083-38754f1e7f8b-kube-api-access-kbkst\") pod \"route-controller-manager-848fc9dbbc-jpw56\" (UID: \"8983098f-af49-4bc4-8083-38754f1e7f8b\") " pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.653712 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.913932 4766 generic.go:334] "Generic (PLEG): container finished" podID="fb9c728e-f90d-4e49-973b-0f775670ac52" containerID="af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56" exitCode=0 Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.913991 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" event={"ID":"fb9c728e-f90d-4e49-973b-0f775670ac52","Type":"ContainerDied","Data":"af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56"} Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.914027 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" event={"ID":"fb9c728e-f90d-4e49-973b-0f775670ac52","Type":"ContainerDied","Data":"675ca343fe176cf4802512e3a7da5abed49f552ab76c468f10770661a18e44d3"} Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.914047 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.914054 4766 scope.go:117] "RemoveContainer" containerID="af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.934350 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q"] Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.937177 4766 scope.go:117] "RemoveContainer" containerID="af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56" Jan 02 00:12:43 crc kubenswrapper[4766]: E0102 00:12:43.937637 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56\": container with ID starting with af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56 not found: ID does not exist" containerID="af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.937694 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56"} err="failed to get container status \"af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56\": rpc error: code = NotFound desc = could not find container \"af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56\": container with ID starting with af7b1af342ed08aad984605d044c3fdafeaf3bbd677ef72dd61352a1cf99fb56 not found: ID does not exist" Jan 02 00:12:43 crc kubenswrapper[4766]: I0102 00:12:43.939029 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-775fdc88b6-d7j7q"] Jan 02 00:12:44 crc kubenswrapper[4766]: I0102 00:12:44.031609 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56"] Jan 02 00:12:44 crc kubenswrapper[4766]: I0102 00:12:44.923800 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" event={"ID":"8983098f-af49-4bc4-8083-38754f1e7f8b","Type":"ContainerStarted","Data":"eaa07e0ac0aa63100f4dd94f26e6a41fce364d567d60a0b9dc2fa1e9509b3348"} Jan 02 00:12:44 crc kubenswrapper[4766]: I0102 00:12:44.924245 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:44 crc kubenswrapper[4766]: I0102 00:12:44.924259 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" event={"ID":"8983098f-af49-4bc4-8083-38754f1e7f8b","Type":"ContainerStarted","Data":"ebce4b218b4e078f4c563c3a8f8058c135b379f25e8042a8353694ad3cd31847"} Jan 02 00:12:44 crc kubenswrapper[4766]: I0102 00:12:44.929209 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" Jan 02 00:12:44 crc kubenswrapper[4766]: I0102 00:12:44.941502 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-848fc9dbbc-jpw56" podStartSLOduration=3.941477568 
podStartE2EDuration="3.941477568s" podCreationTimestamp="2026-01-02 00:12:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:12:44.94117723 +0000 UTC m=+339.739664384" watchObservedRunningTime="2026-01-02 00:12:44.941477568 +0000 UTC m=+339.739964732" Jan 02 00:12:45 crc kubenswrapper[4766]: I0102 00:12:45.541210 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb9c728e-f90d-4e49-973b-0f775670ac52" path="/var/lib/kubelet/pods/fb9c728e-f90d-4e49-973b-0f775670ac52/volumes" Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.004708 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-679d948996-s4xtw"] Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.005391 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" podUID="ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e" containerName="controller-manager" containerID="cri-o://10f10ef4092f5e9d1cf8c922c73a9a2a89dc966998e0cc09a76f4efe57d1fc3f" gracePeriod=30 Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.149078 4766 generic.go:334] "Generic (PLEG): container finished" podID="ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e" containerID="10f10ef4092f5e9d1cf8c922c73a9a2a89dc966998e0cc09a76f4efe57d1fc3f" exitCode=0 Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.149122 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" event={"ID":"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e","Type":"ContainerDied","Data":"10f10ef4092f5e9d1cf8c922c73a9a2a89dc966998e0cc09a76f4efe57d1fc3f"} Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.371993 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.558221 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-proxy-ca-bundles\") pod \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.558334 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-client-ca\") pod \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.558367 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zwfd\" (UniqueName: \"kubernetes.io/projected/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-kube-api-access-2zwfd\") pod \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.558391 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-serving-cert\") pod \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.558432 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-config\") pod \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\" (UID: \"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e\") " Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.559107 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e" (UID: "ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.559168 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-config" (OuterVolumeSpecName: "config") pod "ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e" (UID: "ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.559216 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-client-ca" (OuterVolumeSpecName: "client-ca") pod "ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e" (UID: "ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.564407 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-kube-api-access-2zwfd" (OuterVolumeSpecName: "kube-api-access-2zwfd") pod "ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e" (UID: "ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e"). InnerVolumeSpecName "kube-api-access-2zwfd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.564979 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e" (UID: "ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.659654 4766 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-client-ca\") on node \"crc\" DevicePath \"\"" Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.659726 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zwfd\" (UniqueName: \"kubernetes.io/projected/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-kube-api-access-2zwfd\") on node \"crc\" DevicePath \"\"" Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.659771 4766 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.659798 4766 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:13:21 crc kubenswrapper[4766]: I0102 00:13:21.659823 4766 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.153893 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" event={"ID":"ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e","Type":"ContainerDied","Data":"2d1e2401d70945204b42587055748c58b334f341200f5e4db9b9d4e2cf7d7993"} Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.153928 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-679d948996-s4xtw" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.153940 4766 scope.go:117] "RemoveContainer" containerID="10f10ef4092f5e9d1cf8c922c73a9a2a89dc966998e0cc09a76f4efe57d1fc3f" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.182727 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-679d948996-s4xtw"] Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.186051 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-679d948996-s4xtw"] Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.797118 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-859d75bc7-h2bfm"] Jan 02 00:13:22 crc kubenswrapper[4766]: E0102 00:13:22.797672 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e" containerName="controller-manager" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.797687 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e" containerName="controller-manager" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.797837 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e" containerName="controller-manager" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.798281 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.802464 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.802565 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.802803 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.803022 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.803064 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.803169 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.822134 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 02 00:13:22 crc kubenswrapper[4766]: I0102 00:13:22.823984 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-859d75bc7-h2bfm"] Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.004698 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/535025fc-6a8c-48a8-8804-4931abd52693-client-ca\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " 
pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.004750 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/535025fc-6a8c-48a8-8804-4931abd52693-serving-cert\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.004771 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/535025fc-6a8c-48a8-8804-4931abd52693-config\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.004859 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/535025fc-6a8c-48a8-8804-4931abd52693-proxy-ca-bundles\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.004880 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cscm\" (UniqueName: \"kubernetes.io/projected/535025fc-6a8c-48a8-8804-4931abd52693-kube-api-access-8cscm\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.106716 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/535025fc-6a8c-48a8-8804-4931abd52693-proxy-ca-bundles\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.106881 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cscm\" (UniqueName: \"kubernetes.io/projected/535025fc-6a8c-48a8-8804-4931abd52693-kube-api-access-8cscm\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.106940 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/535025fc-6a8c-48a8-8804-4931abd52693-client-ca\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.106988 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/535025fc-6a8c-48a8-8804-4931abd52693-serving-cert\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 
00:13:23.107039 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/535025fc-6a8c-48a8-8804-4931abd52693-config\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.108740 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/535025fc-6a8c-48a8-8804-4931abd52693-client-ca\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.109292 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/535025fc-6a8c-48a8-8804-4931abd52693-proxy-ca-bundles\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.109813 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/535025fc-6a8c-48a8-8804-4931abd52693-config\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.114810 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/535025fc-6a8c-48a8-8804-4931abd52693-serving-cert\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.137615 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cscm\" (UniqueName: \"kubernetes.io/projected/535025fc-6a8c-48a8-8804-4931abd52693-kube-api-access-8cscm\") pod \"controller-manager-859d75bc7-h2bfm\" (UID: \"535025fc-6a8c-48a8-8804-4931abd52693\") " pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.412939 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.549908 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e" path="/var/lib/kubelet/pods/ea1f79d9-5dd4-43ef-9f44-7d9ff305dc8e/volumes" Jan 02 00:13:23 crc kubenswrapper[4766]: I0102 00:13:23.672133 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-859d75bc7-h2bfm"] Jan 02 00:13:24 crc kubenswrapper[4766]: I0102 00:13:24.175555 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" event={"ID":"535025fc-6a8c-48a8-8804-4931abd52693","Type":"ContainerStarted","Data":"c8498727e7c95ddf8ebd9ca1c88096a25e1acd1f1b636186d038f124bed63350"} Jan 02 00:13:24 crc kubenswrapper[4766]: I0102 00:13:24.175596 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" event={"ID":"535025fc-6a8c-48a8-8804-4931abd52693","Type":"ContainerStarted","Data":"f08aa1f26b8ac5ed637a3d6bc0780f71e2e505c6c4730c963eca2987fc683384"} Jan 02 00:13:24 crc kubenswrapper[4766]: I0102 00:13:24.175929 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:24 crc kubenswrapper[4766]: I0102 00:13:24.180042 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" Jan 02 00:13:24 crc kubenswrapper[4766]: I0102 00:13:24.180304 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:13:24 crc kubenswrapper[4766]: I0102 00:13:24.180364 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:13:24 crc kubenswrapper[4766]: I0102 00:13:24.191476 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-859d75bc7-h2bfm" podStartSLOduration=3.19145712 podStartE2EDuration="3.19145712s" podCreationTimestamp="2026-01-02 00:13:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:13:24.189624261 +0000 UTC m=+378.988111415" watchObservedRunningTime="2026-01-02 00:13:24.19145712 +0000 UTC m=+378.989944294" Jan 02 00:13:54 crc kubenswrapper[4766]: I0102 00:13:54.181405 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:13:54 crc kubenswrapper[4766]: I0102 00:13:54.181991 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:14:24 crc kubenswrapper[4766]: I0102 00:14:24.181687 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:14:24 crc kubenswrapper[4766]: I0102 00:14:24.182503 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:14:24 crc kubenswrapper[4766]: I0102 00:14:24.182573 4766 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:14:24 crc kubenswrapper[4766]: I0102 00:14:24.183342 4766 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bce71262223102a1e221708ed0cb2549b154d42a224262c021bd129659081e5c"} pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 02 00:14:24 crc kubenswrapper[4766]: I0102 00:14:24.183444 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" containerID="cri-o://bce71262223102a1e221708ed0cb2549b154d42a224262c021bd129659081e5c" gracePeriod=600 Jan 02 00:14:24 crc kubenswrapper[4766]: I0102 00:14:24.545806 4766 generic.go:334] "Generic (PLEG): container finished" podID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerID="bce71262223102a1e221708ed0cb2549b154d42a224262c021bd129659081e5c" exitCode=0 Jan 02 00:14:24 crc kubenswrapper[4766]: I0102 00:14:24.546075 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerDied","Data":"bce71262223102a1e221708ed0cb2549b154d42a224262c021bd129659081e5c"} Jan 02 00:14:24 crc kubenswrapper[4766]: I0102 00:14:24.546667 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerStarted","Data":"a3a7bcfe3f9b4401d1f157274622d2a7ef5d28b924c1e0978ebb8ff12e7ce91d"} Jan 02 00:14:24 crc kubenswrapper[4766]: I0102 00:14:24.546714 4766 scope.go:117] "RemoveContainer" containerID="b78f11f5c5bcf915a2a4a95c85cf95488ac44f8dc855300bde4f7da6574223be" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.184693 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4"] Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.187153 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.192000 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.192916 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.196139 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4"] Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.296218 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp9jw\" (UniqueName: \"kubernetes.io/projected/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-kube-api-access-zp9jw\") pod \"collect-profiles-29455215-wpqg4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.296257 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-config-volume\") pod \"collect-profiles-29455215-wpqg4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.296301 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-secret-volume\") pod \"collect-profiles-29455215-wpqg4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.397827 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp9jw\" (UniqueName: \"kubernetes.io/projected/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-kube-api-access-zp9jw\") pod \"collect-profiles-29455215-wpqg4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.397908 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-config-volume\") pod \"collect-profiles-29455215-wpqg4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.397971 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-secret-volume\") pod \"collect-profiles-29455215-wpqg4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.399324 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-config-volume\") pod 
\"collect-profiles-29455215-wpqg4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.415176 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-secret-volume\") pod \"collect-profiles-29455215-wpqg4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.415626 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp9jw\" (UniqueName: \"kubernetes.io/projected/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-kube-api-access-zp9jw\") pod \"collect-profiles-29455215-wpqg4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.504958 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:00 crc kubenswrapper[4766]: I0102 00:15:00.954582 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4"] Jan 02 00:15:01 crc kubenswrapper[4766]: I0102 00:15:01.784655 4766 generic.go:334] "Generic (PLEG): container finished" podID="200a900b-a8ea-4c56-b1d6-76e8a434d3f4" containerID="264ce9d230873825c50e70ca68a4235fb2e7d9b9d3dc11051c2ed7dabafedee8" exitCode=0 Jan 02 00:15:01 crc kubenswrapper[4766]: I0102 00:15:01.784704 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" event={"ID":"200a900b-a8ea-4c56-b1d6-76e8a434d3f4","Type":"ContainerDied","Data":"264ce9d230873825c50e70ca68a4235fb2e7d9b9d3dc11051c2ed7dabafedee8"} Jan 02 00:15:01 crc kubenswrapper[4766]: I0102 00:15:01.784732 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" event={"ID":"200a900b-a8ea-4c56-b1d6-76e8a434d3f4","Type":"ContainerStarted","Data":"16f715288488c04667c816c023fda565408080ecb6c759c21830d5a0f619b0ea"} Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.088587 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.234336 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-config-volume\") pod \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.234443 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zp9jw\" (UniqueName: \"kubernetes.io/projected/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-kube-api-access-zp9jw\") pod \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.234509 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-secret-volume\") pod \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\" (UID: \"200a900b-a8ea-4c56-b1d6-76e8a434d3f4\") " Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.236352 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-config-volume" (OuterVolumeSpecName: "config-volume") pod "200a900b-a8ea-4c56-b1d6-76e8a434d3f4" (UID: "200a900b-a8ea-4c56-b1d6-76e8a434d3f4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.239884 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "200a900b-a8ea-4c56-b1d6-76e8a434d3f4" (UID: "200a900b-a8ea-4c56-b1d6-76e8a434d3f4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.240110 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-kube-api-access-zp9jw" (OuterVolumeSpecName: "kube-api-access-zp9jw") pod "200a900b-a8ea-4c56-b1d6-76e8a434d3f4" (UID: "200a900b-a8ea-4c56-b1d6-76e8a434d3f4"). InnerVolumeSpecName "kube-api-access-zp9jw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.336634 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zp9jw\" (UniqueName: \"kubernetes.io/projected/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-kube-api-access-zp9jw\") on node \"crc\" DevicePath \"\"" Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.336708 4766 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.336736 4766 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/200a900b-a8ea-4c56-b1d6-76e8a434d3f4-config-volume\") on node \"crc\" DevicePath \"\"" Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.799579 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" event={"ID":"200a900b-a8ea-4c56-b1d6-76e8a434d3f4","Type":"ContainerDied","Data":"16f715288488c04667c816c023fda565408080ecb6c759c21830d5a0f619b0ea"} Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.799668 4766 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16f715288488c04667c816c023fda565408080ecb6c759c21830d5a0f619b0ea" Jan 02 00:15:03 crc kubenswrapper[4766]: I0102 00:15:03.799666 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29455215-wpqg4" Jan 02 00:16:11 crc kubenswrapper[4766]: I0102 00:16:11.479599 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xps4h"] Jan 02 00:16:11 crc kubenswrapper[4766]: I0102 00:16:11.481723 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovn-controller" containerID="cri-o://e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d" gracePeriod=30 Jan 02 00:16:11 crc kubenswrapper[4766]: I0102 00:16:11.482083 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="northd" containerID="cri-o://fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55" gracePeriod=30 Jan 02 00:16:11 crc kubenswrapper[4766]: I0102 00:16:11.482310 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca" gracePeriod=30 Jan 02 00:16:11 crc kubenswrapper[4766]: I0102 00:16:11.482363 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="sbdb" containerID="cri-o://2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77" gracePeriod=30 Jan 02 00:16:11 crc kubenswrapper[4766]: I0102 00:16:11.482422 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="kube-rbac-proxy-node" 
containerID="cri-o://f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50" gracePeriod=30 Jan 02 00:16:11 crc kubenswrapper[4766]: I0102 00:16:11.482430 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="nbdb" containerID="cri-o://1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4" gracePeriod=30 Jan 02 00:16:11 crc kubenswrapper[4766]: I0102 00:16:11.482492 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovn-acl-logging" containerID="cri-o://8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e" gracePeriod=30 Jan 02 00:16:11 crc kubenswrapper[4766]: I0102 00:16:11.624254 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" containerID="cri-o://65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486" gracePeriod=30 Jan 02 00:16:13 crc kubenswrapper[4766]: E0102 00:16:13.130210 4766 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486 is running failed: container process not found" containerID="65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486" cmd=["/bin/bash","-c","#!/bin/bash\ntest -f /etc/cni/net.d/10-ovn-kubernetes.conf\n"] Jan 02 00:16:13 crc kubenswrapper[4766]: E0102 00:16:13.131329 4766 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486 is running failed: container process not found" containerID="65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486" cmd=["/bin/bash","-c","#!/bin/bash\ntest -f /etc/cni/net.d/10-ovn-kubernetes.conf\n"] Jan 02 00:16:13 crc kubenswrapper[4766]: E0102 00:16:13.131650 4766 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486 is running failed: container process not found" containerID="65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486" cmd=["/bin/bash","-c","#!/bin/bash\ntest -f /etc/cni/net.d/10-ovn-kubernetes.conf\n"] Jan 02 00:16:13 crc kubenswrapper[4766]: E0102 00:16:13.131693 4766 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486 is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.246053 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovnkube-controller/3.log" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.249497 4766 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovn-acl-logging/0.log" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250130 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovn-controller/0.log" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250658 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486" exitCode=0 Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250693 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77" exitCode=0 Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250709 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4" exitCode=0 Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250724 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55" exitCode=0 Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250737 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca" exitCode=0 Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250748 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50" exitCode=0 Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250760 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e" exitCode=143 Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250752 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486"} Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250772 4766 generic.go:334] "Generic (PLEG): container finished" podID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerID="e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d" exitCode=143 Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250816 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77"} Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250840 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4"} Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250880 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" 
event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55"} Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250890 4766 scope.go:117] "RemoveContainer" containerID="e5bf838ca0799a30c0a828e12202e2c66af3473686ae514b8bbd94c369d9ac58" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.250898 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca"} Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.251019 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50"} Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.251040 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e"} Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.251053 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d"} Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.253323 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lmc2p_adc58583-1116-4066-9ca8-da659b367839/kube-multus/2.log" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.253799 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lmc2p_adc58583-1116-4066-9ca8-da659b367839/kube-multus/1.log" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.253886 4766 generic.go:334] "Generic (PLEG): container finished" podID="adc58583-1116-4066-9ca8-da659b367839" containerID="e096c1e05db993415c68cbf0f3312bb208f649a8216f555dbcb6cb286169308d" exitCode=2 Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.253913 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lmc2p" event={"ID":"adc58583-1116-4066-9ca8-da659b367839","Type":"ContainerDied","Data":"e096c1e05db993415c68cbf0f3312bb208f649a8216f555dbcb6cb286169308d"} Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.254425 4766 scope.go:117] "RemoveContainer" containerID="e096c1e05db993415c68cbf0f3312bb208f649a8216f555dbcb6cb286169308d" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.254703 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-lmc2p_openshift-multus(adc58583-1116-4066-9ca8-da659b367839)\"" pod="openshift-multus/multus-lmc2p" podUID="adc58583-1116-4066-9ca8-da659b367839" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.289456 4766 scope.go:117] "RemoveContainer" containerID="76e33f463186e4646a6c1af42efed221a9d759604e9de18c83e8eaaf1d52a1ea" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.463951 4766 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovn-acl-logging/0.log" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.464587 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovn-controller/0.log" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.464972 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.511976 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-h9pxm"] Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512163 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512176 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512184 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512190 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512200 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="kubecfg-setup" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512209 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="kubecfg-setup" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512218 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="nbdb" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512223 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="nbdb" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512231 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="sbdb" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512236 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="sbdb" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512246 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="northd" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512252 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="northd" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512262 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512268 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512275 4766 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="200a900b-a8ea-4c56-b1d6-76e8a434d3f4" containerName="collect-profiles" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512281 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="200a900b-a8ea-4c56-b1d6-76e8a434d3f4" containerName="collect-profiles" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512288 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovn-acl-logging" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512294 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovn-acl-logging" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512305 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="kube-rbac-proxy-ovn-metrics" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512310 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="kube-rbac-proxy-ovn-metrics" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512319 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512325 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512332 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovn-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512337 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovn-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512346 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="kube-rbac-proxy-node" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512351 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="kube-rbac-proxy-node" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512435 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovn-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512444 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovn-acl-logging" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512452 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512459 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="sbdb" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512466 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512473 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="200a900b-a8ea-4c56-b1d6-76e8a434d3f4" containerName="collect-profiles" Jan 02 00:16:14 crc 
kubenswrapper[4766]: I0102 00:16:14.512481 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="kube-rbac-proxy-ovn-metrics" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512490 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="nbdb" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512497 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="kube-rbac-proxy-node" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512504 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512510 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512519 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="northd" Jan 02 00:16:14 crc kubenswrapper[4766]: E0102 00:16:14.512598 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512606 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.512692 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" containerName="ovnkube-controller" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.514092 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587504 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-bin\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587544 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-systemd\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587582 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-env-overrides\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587597 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-var-lib-openvswitch\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587626 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-config\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587644 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-ovn\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587662 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-kubelet\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587696 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587719 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-systemd-units\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587738 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-openvswitch\") 
pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587754 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-slash\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587774 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-script-lib\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587790 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-netns\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587814 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-log-socket\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587835 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbsns\" (UniqueName: \"kubernetes.io/projected/66b7fd44-a7ca-46a0-a585-cb37baccc49e-kube-api-access-sbsns\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587881 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-netd\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587910 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-node-log\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587942 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovn-node-metrics-cert\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587966 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-etc-openvswitch\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.587982 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-ovn-kubernetes\") pod \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\" (UID: \"66b7fd44-a7ca-46a0-a585-cb37baccc49e\") " Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588028 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588089 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-slash" (OuterVolumeSpecName: "host-slash") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588137 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588235 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588271 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588325 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-log-socket" (OuterVolumeSpecName: "log-socket") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588318 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588350 4766 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588363 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-node-log" (OuterVolumeSpecName: "node-log") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588377 4766 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588398 4766 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588428 4766 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588445 4766 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-slash\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588366 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588382 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588391 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588404 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588404 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588422 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588525 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588584 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.588733 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.594493 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.595721 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66b7fd44-a7ca-46a0-a585-cb37baccc49e-kube-api-access-sbsns" (OuterVolumeSpecName: "kube-api-access-sbsns") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "kube-api-access-sbsns". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.602713 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "66b7fd44-a7ca-46a0-a585-cb37baccc49e" (UID: "66b7fd44-a7ca-46a0-a585-cb37baccc49e"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689538 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b8a813f9-1c39-42a7-85ad-461e2d931969-ovnkube-config\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689588 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-run-ovn\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689608 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-run-ovn-kubernetes\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689631 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-log-socket\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689654 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-slash\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689679 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-node-log\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689701 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-run-netns\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689729 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxmm9\" (UniqueName: 
\"kubernetes.io/projected/b8a813f9-1c39-42a7-85ad-461e2d931969-kube-api-access-bxmm9\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689749 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-kubelet\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689778 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b8a813f9-1c39-42a7-85ad-461e2d931969-ovn-node-metrics-cert\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689801 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b8a813f9-1c39-42a7-85ad-461e2d931969-ovnkube-script-lib\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689859 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-etc-openvswitch\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689880 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-run-openvswitch\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689910 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-run-systemd\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.689972 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-var-lib-openvswitch\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690005 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-cni-netd\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690081 4766 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-systemd-units\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690118 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-cni-bin\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690160 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b8a813f9-1c39-42a7-85ad-461e2d931969-env-overrides\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690192 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690692 4766 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690825 4766 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690884 4766 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690905 4766 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690921 4766 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690935 4766 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690946 4766 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690957 4766 
reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-log-socket\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690968 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbsns\" (UniqueName: \"kubernetes.io/projected/66b7fd44-a7ca-46a0-a585-cb37baccc49e-kube-api-access-sbsns\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690982 4766 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.690992 4766 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-node-log\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.691003 4766 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/66b7fd44-a7ca-46a0-a585-cb37baccc49e-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.691013 4766 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.691024 4766 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.691054 4766 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/66b7fd44-a7ca-46a0-a585-cb37baccc49e-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.791803 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-var-lib-openvswitch\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.791918 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-cni-netd\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.791961 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-systemd-units\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.791996 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-var-lib-openvswitch\") pod \"ovnkube-node-h9pxm\" (UID: 
\"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792002 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-cni-bin\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792048 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-cni-bin\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792099 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b8a813f9-1c39-42a7-85ad-461e2d931969-env-overrides\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792126 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-cni-netd\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792165 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-systemd-units\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792148 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792188 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792297 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b8a813f9-1c39-42a7-85ad-461e2d931969-ovnkube-config\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792418 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-run-ovn\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792461 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-run-ovn-kubernetes\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792486 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-run-ovn\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792501 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-log-socket\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792545 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-log-socket\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792550 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-run-ovn-kubernetes\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792554 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-slash\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792606 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-slash\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792627 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-node-log\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792664 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-node-log\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792680 4766 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-run-netns\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792726 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxmm9\" (UniqueName: \"kubernetes.io/projected/b8a813f9-1c39-42a7-85ad-461e2d931969-kube-api-access-bxmm9\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792765 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-kubelet\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792797 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b8a813f9-1c39-42a7-85ad-461e2d931969-ovn-node-metrics-cert\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792728 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-run-netns\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792837 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b8a813f9-1c39-42a7-85ad-461e2d931969-ovnkube-script-lib\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.792927 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-host-kubelet\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.793006 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-etc-openvswitch\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.793074 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-run-openvswitch\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.793149 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-run-systemd\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.793162 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-etc-openvswitch\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.793189 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b8a813f9-1c39-42a7-85ad-461e2d931969-env-overrides\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.793259 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-run-openvswitch\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.793274 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b8a813f9-1c39-42a7-85ad-461e2d931969-run-systemd\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.793561 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b8a813f9-1c39-42a7-85ad-461e2d931969-ovnkube-config\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.794003 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b8a813f9-1c39-42a7-85ad-461e2d931969-ovnkube-script-lib\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.798042 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b8a813f9-1c39-42a7-85ad-461e2d931969-ovn-node-metrics-cert\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.833403 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxmm9\" (UniqueName: \"kubernetes.io/projected/b8a813f9-1c39-42a7-85ad-461e2d931969-kube-api-access-bxmm9\") pod \"ovnkube-node-h9pxm\" (UID: \"b8a813f9-1c39-42a7-85ad-461e2d931969\") " pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:14 crc kubenswrapper[4766]: I0102 00:16:14.838905 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.263808 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lmc2p_adc58583-1116-4066-9ca8-da659b367839/kube-multus/2.log" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.265408 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" event={"ID":"b8a813f9-1c39-42a7-85ad-461e2d931969","Type":"ContainerStarted","Data":"d6abf84705bc20e218696ccb2c9e4941d3cbaa7c916cdd5e78dd5ba427b895b1"} Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.270056 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovn-acl-logging/0.log" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.270796 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xps4h_66b7fd44-a7ca-46a0-a585-cb37baccc49e/ovn-controller/0.log" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.271225 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" event={"ID":"66b7fd44-a7ca-46a0-a585-cb37baccc49e","Type":"ContainerDied","Data":"3a4095793733b3ef5eb750899e9f104983770b40d96ac0c571b92c2fbfed474a"} Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.271272 4766 scope.go:117] "RemoveContainer" containerID="65cb8e526ed37ac431d4269c186608701a392fb988e60503cca00e5ada8a3486" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.271637 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xps4h" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.329404 4766 scope.go:117] "RemoveContainer" containerID="2f71bf7264bb63d83c096ce1ad36ff22d9e047449540ac4c7c869e70b3cffb77" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.347580 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xps4h"] Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.356033 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xps4h"] Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.359724 4766 scope.go:117] "RemoveContainer" containerID="1f4db0bc923e83da06fb13e3cc842c06176fe184bd914495502280c6d46caad4" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.373934 4766 scope.go:117] "RemoveContainer" containerID="fc7ac500e490b3180865c3104dff8afbbb6d49fc57e7a6ce54d7e074cfcd4f55" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.391637 4766 scope.go:117] "RemoveContainer" containerID="b1cd6832ba0350c944bf99e54c3db1a2d2b378e2368ce04ee35ac150e57be7ca" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.415418 4766 scope.go:117] "RemoveContainer" containerID="f564132a69a32efa4ebca518df2e58ce479b3e6fbf903f9876a962643cc86d50" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.431505 4766 scope.go:117] "RemoveContainer" containerID="8839dc13cccab2af9cb93d685f8cca05e94d89bacf834952e14646d1bc8d0d1e" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.447036 4766 scope.go:117] "RemoveContainer" containerID="e18df0d65c0a3b3585a5dcaba9a84fd399e6bfa8ba4ba1cd692a51265e6ad94d" Jan 02 00:16:15 crc kubenswrapper[4766]: I0102 00:16:15.463412 4766 scope.go:117] "RemoveContainer" containerID="eb18a3cd0651c802f186f04817aaf9d19fa68ca6c69d3b6ccad920843848412f" Jan 02 00:16:15 crc 
kubenswrapper[4766]: I0102 00:16:15.545403 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66b7fd44-a7ca-46a0-a585-cb37baccc49e" path="/var/lib/kubelet/pods/66b7fd44-a7ca-46a0-a585-cb37baccc49e/volumes" Jan 02 00:16:16 crc kubenswrapper[4766]: I0102 00:16:16.280347 4766 generic.go:334] "Generic (PLEG): container finished" podID="b8a813f9-1c39-42a7-85ad-461e2d931969" containerID="c0d0e9e29a009a52a3689494509cb25ec4825413f4112b0dd5632fe56e0df704" exitCode=0 Jan 02 00:16:16 crc kubenswrapper[4766]: I0102 00:16:16.280417 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" event={"ID":"b8a813f9-1c39-42a7-85ad-461e2d931969","Type":"ContainerDied","Data":"c0d0e9e29a009a52a3689494509cb25ec4825413f4112b0dd5632fe56e0df704"} Jan 02 00:16:17 crc kubenswrapper[4766]: I0102 00:16:17.294391 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" event={"ID":"b8a813f9-1c39-42a7-85ad-461e2d931969","Type":"ContainerStarted","Data":"408113a919d9900d0d0e0f451ba58f0b52129ced5af4d508a10b4212a8c71e96"} Jan 02 00:16:17 crc kubenswrapper[4766]: I0102 00:16:17.294946 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" event={"ID":"b8a813f9-1c39-42a7-85ad-461e2d931969","Type":"ContainerStarted","Data":"5a36c7617e3718842753a13a5cff9ad42d8797452baf5d68efafa281cef6821f"} Jan 02 00:16:17 crc kubenswrapper[4766]: I0102 00:16:17.295181 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" event={"ID":"b8a813f9-1c39-42a7-85ad-461e2d931969","Type":"ContainerStarted","Data":"ee9eb7c7946e3bfc214d403aa9a33dadc3a92d897c3802e8b17f2a71077eacb1"} Jan 02 00:16:17 crc kubenswrapper[4766]: I0102 00:16:17.295251 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" event={"ID":"b8a813f9-1c39-42a7-85ad-461e2d931969","Type":"ContainerStarted","Data":"fb9b8a927765a4f392e0a31417acf60d6449dea3129eb01058ebd19fea13efcf"} Jan 02 00:16:17 crc kubenswrapper[4766]: I0102 00:16:17.295309 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" event={"ID":"b8a813f9-1c39-42a7-85ad-461e2d931969","Type":"ContainerStarted","Data":"3c38ef517a0bf9b06ce8c5aa0c7394d91938934626f7f0685105d5b7bbfa780c"} Jan 02 00:16:18 crc kubenswrapper[4766]: I0102 00:16:18.304442 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" event={"ID":"b8a813f9-1c39-42a7-85ad-461e2d931969","Type":"ContainerStarted","Data":"1aa553fd86360cc8a00880be97c06ae2f6f1065c8eb18b06c2d734c7aa75c3c0"} Jan 02 00:16:20 crc kubenswrapper[4766]: I0102 00:16:20.320604 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" event={"ID":"b8a813f9-1c39-42a7-85ad-461e2d931969","Type":"ContainerStarted","Data":"0266eebdf7c48401d28f3554059fe3696d50fe717fc02f1e77123bc0ab7d53ca"} Jan 02 00:16:23 crc kubenswrapper[4766]: I0102 00:16:23.346387 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" event={"ID":"b8a813f9-1c39-42a7-85ad-461e2d931969","Type":"ContainerStarted","Data":"592c1b0f5feffc5c2ba5285afbbdc6e4cd8c73f128baa446f917b1402f187d53"} Jan 02 00:16:23 crc kubenswrapper[4766]: I0102 00:16:23.346972 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 
00:16:23 crc kubenswrapper[4766]: I0102 00:16:23.346990 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:23 crc kubenswrapper[4766]: I0102 00:16:23.388583 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:23 crc kubenswrapper[4766]: I0102 00:16:23.409892 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" podStartSLOduration=9.409841544 podStartE2EDuration="9.409841544s" podCreationTimestamp="2026-01-02 00:16:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:16:23.393806471 +0000 UTC m=+558.192293645" watchObservedRunningTime="2026-01-02 00:16:23.409841544 +0000 UTC m=+558.208328708" Jan 02 00:16:24 crc kubenswrapper[4766]: I0102 00:16:24.181494 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:16:24 crc kubenswrapper[4766]: I0102 00:16:24.181562 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:16:24 crc kubenswrapper[4766]: I0102 00:16:24.354195 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:24 crc kubenswrapper[4766]: I0102 00:16:24.400692 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:26 crc kubenswrapper[4766]: I0102 00:16:26.534343 4766 scope.go:117] "RemoveContainer" containerID="e096c1e05db993415c68cbf0f3312bb208f649a8216f555dbcb6cb286169308d" Jan 02 00:16:26 crc kubenswrapper[4766]: E0102 00:16:26.534757 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-lmc2p_openshift-multus(adc58583-1116-4066-9ca8-da659b367839)\"" pod="openshift-multus/multus-lmc2p" podUID="adc58583-1116-4066-9ca8-da659b367839" Jan 02 00:16:38 crc kubenswrapper[4766]: I0102 00:16:38.534252 4766 scope.go:117] "RemoveContainer" containerID="e096c1e05db993415c68cbf0f3312bb208f649a8216f555dbcb6cb286169308d" Jan 02 00:16:39 crc kubenswrapper[4766]: I0102 00:16:39.456640 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lmc2p_adc58583-1116-4066-9ca8-da659b367839/kube-multus/2.log" Jan 02 00:16:39 crc kubenswrapper[4766]: I0102 00:16:39.457546 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lmc2p" event={"ID":"adc58583-1116-4066-9ca8-da659b367839","Type":"ContainerStarted","Data":"a33e9849ba20355bc3c1427ddb2979ba3a789f013cdab7789961904f5370ce21"} Jan 02 00:16:44 crc kubenswrapper[4766]: I0102 00:16:44.871062 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-h9pxm" Jan 02 00:16:54 crc 
kubenswrapper[4766]: I0102 00:16:54.180627 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:16:54 crc kubenswrapper[4766]: I0102 00:16:54.181123 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:17:17 crc kubenswrapper[4766]: I0102 00:17:17.401058 4766 scope.go:117] "RemoveContainer" containerID="f7e2716442b76e0655a9600ffba742884cb635040f141129d51029fc81b2d7e5" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.083775 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x4q7d"] Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.084690 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-x4q7d" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" containerName="registry-server" containerID="cri-o://5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9" gracePeriod=30 Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.443786 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.583615 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hxz9\" (UniqueName: \"kubernetes.io/projected/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-kube-api-access-5hxz9\") pod \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\" (UID: \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.583679 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-catalog-content\") pod \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\" (UID: \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.583789 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-utilities\") pod \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\" (UID: \"40c7eea3-f2b9-4769-a43f-d31eb3ff193f\") " Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.584942 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-utilities" (OuterVolumeSpecName: "utilities") pod "40c7eea3-f2b9-4769-a43f-d31eb3ff193f" (UID: "40c7eea3-f2b9-4769-a43f-d31eb3ff193f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.592031 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-kube-api-access-5hxz9" (OuterVolumeSpecName: "kube-api-access-5hxz9") pod "40c7eea3-f2b9-4769-a43f-d31eb3ff193f" (UID: "40c7eea3-f2b9-4769-a43f-d31eb3ff193f"). InnerVolumeSpecName "kube-api-access-5hxz9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.628470 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "40c7eea3-f2b9-4769-a43f-d31eb3ff193f" (UID: "40c7eea3-f2b9-4769-a43f-d31eb3ff193f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.685641 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hxz9\" (UniqueName: \"kubernetes.io/projected/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-kube-api-access-5hxz9\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.685708 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.685737 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40c7eea3-f2b9-4769-a43f-d31eb3ff193f-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.742894 4766 generic.go:334] "Generic (PLEG): container finished" podID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" containerID="5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9" exitCode=0 Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.742955 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x4q7d" event={"ID":"40c7eea3-f2b9-4769-a43f-d31eb3ff193f","Type":"ContainerDied","Data":"5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9"} Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.743005 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x4q7d" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.743038 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x4q7d" event={"ID":"40c7eea3-f2b9-4769-a43f-d31eb3ff193f","Type":"ContainerDied","Data":"a31f0dda2da569ead918cb59be21f9e9da4a7c835dedf8137e92528e35da4daf"} Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.743080 4766 scope.go:117] "RemoveContainer" containerID="5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.776137 4766 scope.go:117] "RemoveContainer" containerID="de615bfe9f1efeeba1e93ac5cd0837637cbe25f947275a877a8829b78d21217d" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.784200 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x4q7d"] Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.787649 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-x4q7d"] Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.807254 4766 scope.go:117] "RemoveContainer" containerID="bb65587edc630ca31e867f325ea6ab24d0a7a2765550712182ac9740fa8997bd" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.824008 4766 scope.go:117] "RemoveContainer" containerID="5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9" Jan 02 00:17:23 crc kubenswrapper[4766]: E0102 00:17:23.824324 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9\": container with ID starting with 5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9 not found: ID does not exist" containerID="5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.824374 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9"} err="failed to get container status \"5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9\": rpc error: code = NotFound desc = could not find container \"5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9\": container with ID starting with 5c50bc9bb574a098ae0a626687a2293d4ca43484519861cdac73f84ea120fac9 not found: ID does not exist" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.824401 4766 scope.go:117] "RemoveContainer" containerID="de615bfe9f1efeeba1e93ac5cd0837637cbe25f947275a877a8829b78d21217d" Jan 02 00:17:23 crc kubenswrapper[4766]: E0102 00:17:23.824646 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de615bfe9f1efeeba1e93ac5cd0837637cbe25f947275a877a8829b78d21217d\": container with ID starting with de615bfe9f1efeeba1e93ac5cd0837637cbe25f947275a877a8829b78d21217d not found: ID does not exist" containerID="de615bfe9f1efeeba1e93ac5cd0837637cbe25f947275a877a8829b78d21217d" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.824671 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de615bfe9f1efeeba1e93ac5cd0837637cbe25f947275a877a8829b78d21217d"} err="failed to get container status \"de615bfe9f1efeeba1e93ac5cd0837637cbe25f947275a877a8829b78d21217d\": rpc error: code = NotFound desc = could not find 
container \"de615bfe9f1efeeba1e93ac5cd0837637cbe25f947275a877a8829b78d21217d\": container with ID starting with de615bfe9f1efeeba1e93ac5cd0837637cbe25f947275a877a8829b78d21217d not found: ID does not exist" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.824691 4766 scope.go:117] "RemoveContainer" containerID="bb65587edc630ca31e867f325ea6ab24d0a7a2765550712182ac9740fa8997bd" Jan 02 00:17:23 crc kubenswrapper[4766]: E0102 00:17:23.825285 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb65587edc630ca31e867f325ea6ab24d0a7a2765550712182ac9740fa8997bd\": container with ID starting with bb65587edc630ca31e867f325ea6ab24d0a7a2765550712182ac9740fa8997bd not found: ID does not exist" containerID="bb65587edc630ca31e867f325ea6ab24d0a7a2765550712182ac9740fa8997bd" Jan 02 00:17:23 crc kubenswrapper[4766]: I0102 00:17:23.825310 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb65587edc630ca31e867f325ea6ab24d0a7a2765550712182ac9740fa8997bd"} err="failed to get container status \"bb65587edc630ca31e867f325ea6ab24d0a7a2765550712182ac9740fa8997bd\": rpc error: code = NotFound desc = could not find container \"bb65587edc630ca31e867f325ea6ab24d0a7a2765550712182ac9740fa8997bd\": container with ID starting with bb65587edc630ca31e867f325ea6ab24d0a7a2765550712182ac9740fa8997bd not found: ID does not exist" Jan 02 00:17:24 crc kubenswrapper[4766]: I0102 00:17:24.180403 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:17:24 crc kubenswrapper[4766]: I0102 00:17:24.180473 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:17:24 crc kubenswrapper[4766]: I0102 00:17:24.180517 4766 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:17:24 crc kubenswrapper[4766]: I0102 00:17:24.181057 4766 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a3a7bcfe3f9b4401d1f157274622d2a7ef5d28b924c1e0978ebb8ff12e7ce91d"} pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 02 00:17:24 crc kubenswrapper[4766]: I0102 00:17:24.181109 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" containerID="cri-o://a3a7bcfe3f9b4401d1f157274622d2a7ef5d28b924c1e0978ebb8ff12e7ce91d" gracePeriod=600 Jan 02 00:17:24 crc kubenswrapper[4766]: I0102 00:17:24.751335 4766 generic.go:334] "Generic (PLEG): container finished" podID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerID="a3a7bcfe3f9b4401d1f157274622d2a7ef5d28b924c1e0978ebb8ff12e7ce91d" exitCode=0 Jan 02 00:17:24 crc kubenswrapper[4766]: I0102 00:17:24.751383 4766 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerDied","Data":"a3a7bcfe3f9b4401d1f157274622d2a7ef5d28b924c1e0978ebb8ff12e7ce91d"} Jan 02 00:17:24 crc kubenswrapper[4766]: I0102 00:17:24.751937 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerStarted","Data":"690529f4c1451fbb7a4daf40afad9fe675d9ba0699ee41538a56e7145e9d3df2"} Jan 02 00:17:24 crc kubenswrapper[4766]: I0102 00:17:24.751967 4766 scope.go:117] "RemoveContainer" containerID="bce71262223102a1e221708ed0cb2549b154d42a224262c021bd129659081e5c" Jan 02 00:17:25 crc kubenswrapper[4766]: I0102 00:17:25.544676 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" path="/var/lib/kubelet/pods/40c7eea3-f2b9-4769-a43f-d31eb3ff193f/volumes" Jan 02 00:17:26 crc kubenswrapper[4766]: I0102 00:17:26.848577 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f"] Jan 02 00:17:26 crc kubenswrapper[4766]: E0102 00:17:26.850127 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" containerName="extract-content" Jan 02 00:17:26 crc kubenswrapper[4766]: I0102 00:17:26.850288 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" containerName="extract-content" Jan 02 00:17:26 crc kubenswrapper[4766]: E0102 00:17:26.850426 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" containerName="extract-utilities" Jan 02 00:17:26 crc kubenswrapper[4766]: I0102 00:17:26.850536 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" containerName="extract-utilities" Jan 02 00:17:26 crc kubenswrapper[4766]: E0102 00:17:26.850650 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" containerName="registry-server" Jan 02 00:17:26 crc kubenswrapper[4766]: I0102 00:17:26.850765 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" containerName="registry-server" Jan 02 00:17:26 crc kubenswrapper[4766]: I0102 00:17:26.851131 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="40c7eea3-f2b9-4769-a43f-d31eb3ff193f" containerName="registry-server" Jan 02 00:17:26 crc kubenswrapper[4766]: I0102 00:17:26.852599 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:26 crc kubenswrapper[4766]: I0102 00:17:26.854996 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 02 00:17:26 crc kubenswrapper[4766]: I0102 00:17:26.865140 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f"] Jan 02 00:17:26 crc kubenswrapper[4766]: I0102 00:17:26.959470 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlx2j\" (UniqueName: \"kubernetes.io/projected/1270eea2-c753-4f07-b4bb-b592f4d04827-kube-api-access-mlx2j\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:26 crc kubenswrapper[4766]: I0102 00:17:26.959525 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:26 crc kubenswrapper[4766]: I0102 00:17:26.959605 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:27 crc kubenswrapper[4766]: I0102 00:17:27.060551 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:27 crc kubenswrapper[4766]: I0102 00:17:27.060632 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlx2j\" (UniqueName: \"kubernetes.io/projected/1270eea2-c753-4f07-b4bb-b592f4d04827-kube-api-access-mlx2j\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:27 crc kubenswrapper[4766]: I0102 00:17:27.060661 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:27 crc kubenswrapper[4766]: I0102 00:17:27.061241 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:27 crc kubenswrapper[4766]: I0102 00:17:27.061714 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:27 crc kubenswrapper[4766]: I0102 00:17:27.081507 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlx2j\" (UniqueName: \"kubernetes.io/projected/1270eea2-c753-4f07-b4bb-b592f4d04827-kube-api-access-mlx2j\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:27 crc kubenswrapper[4766]: I0102 00:17:27.186329 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:27 crc kubenswrapper[4766]: I0102 00:17:27.347858 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f"] Jan 02 00:17:27 crc kubenswrapper[4766]: W0102 00:17:27.356029 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1270eea2_c753_4f07_b4bb_b592f4d04827.slice/crio-b247050a82de31884ef755b241cf8d27cf53fa69dd053d1d3f5325034ed586ef WatchSource:0}: Error finding container b247050a82de31884ef755b241cf8d27cf53fa69dd053d1d3f5325034ed586ef: Status 404 returned error can't find the container with id b247050a82de31884ef755b241cf8d27cf53fa69dd053d1d3f5325034ed586ef Jan 02 00:17:27 crc kubenswrapper[4766]: I0102 00:17:27.773249 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" event={"ID":"1270eea2-c753-4f07-b4bb-b592f4d04827","Type":"ContainerStarted","Data":"b247050a82de31884ef755b241cf8d27cf53fa69dd053d1d3f5325034ed586ef"} Jan 02 00:17:28 crc kubenswrapper[4766]: I0102 00:17:28.780029 4766 generic.go:334] "Generic (PLEG): container finished" podID="1270eea2-c753-4f07-b4bb-b592f4d04827" containerID="5bb5d168c419e546055c0ea0d3c771f75b316edc6c66ffaafeb3e5ab5828e916" exitCode=0 Jan 02 00:17:28 crc kubenswrapper[4766]: I0102 00:17:28.780096 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" event={"ID":"1270eea2-c753-4f07-b4bb-b592f4d04827","Type":"ContainerDied","Data":"5bb5d168c419e546055c0ea0d3c771f75b316edc6c66ffaafeb3e5ab5828e916"} Jan 02 00:17:28 crc kubenswrapper[4766]: I0102 00:17:28.782124 4766 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 02 00:17:30 crc kubenswrapper[4766]: I0102 00:17:30.798539 4766 generic.go:334] "Generic (PLEG): container finished" podID="1270eea2-c753-4f07-b4bb-b592f4d04827" 
containerID="45d48be67e39f75e977a7a3214120f87792340fb43bf292d9cdfc63c580bf0d2" exitCode=0 Jan 02 00:17:30 crc kubenswrapper[4766]: I0102 00:17:30.798690 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" event={"ID":"1270eea2-c753-4f07-b4bb-b592f4d04827","Type":"ContainerDied","Data":"45d48be67e39f75e977a7a3214120f87792340fb43bf292d9cdfc63c580bf0d2"} Jan 02 00:17:31 crc kubenswrapper[4766]: I0102 00:17:31.809745 4766 generic.go:334] "Generic (PLEG): container finished" podID="1270eea2-c753-4f07-b4bb-b592f4d04827" containerID="653ef12f608faa37a218da86bb69783cadfe8fb283343af977cb695d445ca380" exitCode=0 Jan 02 00:17:31 crc kubenswrapper[4766]: I0102 00:17:31.809892 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" event={"ID":"1270eea2-c753-4f07-b4bb-b592f4d04827","Type":"ContainerDied","Data":"653ef12f608faa37a218da86bb69783cadfe8fb283343af977cb695d445ca380"} Jan 02 00:17:32 crc kubenswrapper[4766]: I0102 00:17:32.842216 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq"] Jan 02 00:17:32 crc kubenswrapper[4766]: I0102 00:17:32.843702 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:32 crc kubenswrapper[4766]: I0102 00:17:32.859577 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq"] Jan 02 00:17:32 crc kubenswrapper[4766]: I0102 00:17:32.939229 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rhml\" (UniqueName: \"kubernetes.io/projected/206c274e-ae59-471d-b0ca-5f6f34b28706-kube-api-access-4rhml\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:32 crc kubenswrapper[4766]: I0102 00:17:32.939276 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:32 crc kubenswrapper[4766]: I0102 00:17:32.939325 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.040563 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rhml\" (UniqueName: \"kubernetes.io/projected/206c274e-ae59-471d-b0ca-5f6f34b28706-kube-api-access-4rhml\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " 
pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.040886 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.040994 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.041363 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.041432 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.062153 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rhml\" (UniqueName: \"kubernetes.io/projected/206c274e-ae59-471d-b0ca-5f6f34b28706-kube-api-access-4rhml\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.113200 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.199547 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.242967 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-bundle\") pod \"1270eea2-c753-4f07-b4bb-b592f4d04827\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.243043 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlx2j\" (UniqueName: \"kubernetes.io/projected/1270eea2-c753-4f07-b4bb-b592f4d04827-kube-api-access-mlx2j\") pod \"1270eea2-c753-4f07-b4bb-b592f4d04827\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.243178 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-util\") pod \"1270eea2-c753-4f07-b4bb-b592f4d04827\" (UID: \"1270eea2-c753-4f07-b4bb-b592f4d04827\") " Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.246439 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-bundle" (OuterVolumeSpecName: "bundle") pod "1270eea2-c753-4f07-b4bb-b592f4d04827" (UID: "1270eea2-c753-4f07-b4bb-b592f4d04827"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.250600 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1270eea2-c753-4f07-b4bb-b592f4d04827-kube-api-access-mlx2j" (OuterVolumeSpecName: "kube-api-access-mlx2j") pod "1270eea2-c753-4f07-b4bb-b592f4d04827" (UID: "1270eea2-c753-4f07-b4bb-b592f4d04827"). InnerVolumeSpecName "kube-api-access-mlx2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.259346 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-util" (OuterVolumeSpecName: "util") pod "1270eea2-c753-4f07-b4bb-b592f4d04827" (UID: "1270eea2-c753-4f07-b4bb-b592f4d04827"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.345614 4766 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-util\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.345872 4766 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1270eea2-c753-4f07-b4bb-b592f4d04827-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.345885 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlx2j\" (UniqueName: \"kubernetes.io/projected/1270eea2-c753-4f07-b4bb-b592f4d04827-kube-api-access-mlx2j\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.408680 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq"] Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.827709 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" event={"ID":"1270eea2-c753-4f07-b4bb-b592f4d04827","Type":"ContainerDied","Data":"b247050a82de31884ef755b241cf8d27cf53fa69dd053d1d3f5325034ed586ef"} Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.828180 4766 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b247050a82de31884ef755b241cf8d27cf53fa69dd053d1d3f5325034ed586ef" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.828287 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.833110 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6"] Jan 02 00:17:33 crc kubenswrapper[4766]: E0102 00:17:33.833519 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1270eea2-c753-4f07-b4bb-b592f4d04827" containerName="util" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.833555 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="1270eea2-c753-4f07-b4bb-b592f4d04827" containerName="util" Jan 02 00:17:33 crc kubenswrapper[4766]: E0102 00:17:33.833595 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1270eea2-c753-4f07-b4bb-b592f4d04827" containerName="extract" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.833610 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="1270eea2-c753-4f07-b4bb-b592f4d04827" containerName="extract" Jan 02 00:17:33 crc kubenswrapper[4766]: E0102 00:17:33.833629 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1270eea2-c753-4f07-b4bb-b592f4d04827" containerName="pull" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.833642 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="1270eea2-c753-4f07-b4bb-b592f4d04827" containerName="pull" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.833811 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="1270eea2-c753-4f07-b4bb-b592f4d04827" containerName="extract" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.834449 4766 generic.go:334] "Generic (PLEG): container finished" 
podID="206c274e-ae59-471d-b0ca-5f6f34b28706" containerID="68e6e3a5a4d7d5cc38ec36e7005f6ff270e7df62083cc9782d76082341f735ba" exitCode=0 Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.835048 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" event={"ID":"206c274e-ae59-471d-b0ca-5f6f34b28706","Type":"ContainerDied","Data":"68e6e3a5a4d7d5cc38ec36e7005f6ff270e7df62083cc9782d76082341f735ba"} Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.835092 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" event={"ID":"206c274e-ae59-471d-b0ca-5f6f34b28706","Type":"ContainerStarted","Data":"f414db0e2825ada3e02817c0a96377a3786b28507a2f9687c3102442360514e9"} Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.835202 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.845445 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6"] Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.954971 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjpcl\" (UniqueName: \"kubernetes.io/projected/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-kube-api-access-hjpcl\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6\" (UID: \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.955043 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6\" (UID: \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:33 crc kubenswrapper[4766]: I0102 00:17:33.955182 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6\" (UID: \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:34 crc kubenswrapper[4766]: I0102 00:17:34.057067 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6\" (UID: \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:34 crc kubenswrapper[4766]: I0102 00:17:34.057197 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjpcl\" (UniqueName: \"kubernetes.io/projected/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-kube-api-access-hjpcl\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6\" (UID: 
\"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:34 crc kubenswrapper[4766]: I0102 00:17:34.057291 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6\" (UID: \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:34 crc kubenswrapper[4766]: I0102 00:17:34.057557 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6\" (UID: \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:34 crc kubenswrapper[4766]: I0102 00:17:34.057977 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6\" (UID: \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:34 crc kubenswrapper[4766]: I0102 00:17:34.092536 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjpcl\" (UniqueName: \"kubernetes.io/projected/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-kube-api-access-hjpcl\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6\" (UID: \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:34 crc kubenswrapper[4766]: I0102 00:17:34.170107 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:34 crc kubenswrapper[4766]: I0102 00:17:34.572128 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6"] Jan 02 00:17:34 crc kubenswrapper[4766]: I0102 00:17:34.847056 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" event={"ID":"206c274e-ae59-471d-b0ca-5f6f34b28706","Type":"ContainerStarted","Data":"b80a05a2cd8c9a6295de62cf2352a59a8fb5977c4c366dc3eff8168586f69b44"} Jan 02 00:17:34 crc kubenswrapper[4766]: I0102 00:17:34.851535 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" event={"ID":"85f7f29a-3282-41e0-bc15-b4e3e54f27c0","Type":"ContainerStarted","Data":"70cfebd6d9fa99730aa96590e6f073f45d8fa6e428d0aa3d54c67c09644373f1"} Jan 02 00:17:34 crc kubenswrapper[4766]: I0102 00:17:34.851573 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" event={"ID":"85f7f29a-3282-41e0-bc15-b4e3e54f27c0","Type":"ContainerStarted","Data":"bac3070221e8c7bc02268025a9e5d13fa34c9d9e01eecfefba88e55c57cc0196"} Jan 02 00:17:35 crc kubenswrapper[4766]: I0102 00:17:35.859831 4766 generic.go:334] "Generic (PLEG): container finished" podID="206c274e-ae59-471d-b0ca-5f6f34b28706" containerID="b80a05a2cd8c9a6295de62cf2352a59a8fb5977c4c366dc3eff8168586f69b44" exitCode=0 Jan 02 00:17:35 crc kubenswrapper[4766]: I0102 00:17:35.860237 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" event={"ID":"206c274e-ae59-471d-b0ca-5f6f34b28706","Type":"ContainerDied","Data":"b80a05a2cd8c9a6295de62cf2352a59a8fb5977c4c366dc3eff8168586f69b44"} Jan 02 00:17:35 crc kubenswrapper[4766]: I0102 00:17:35.864459 4766 generic.go:334] "Generic (PLEG): container finished" podID="85f7f29a-3282-41e0-bc15-b4e3e54f27c0" containerID="70cfebd6d9fa99730aa96590e6f073f45d8fa6e428d0aa3d54c67c09644373f1" exitCode=0 Jan 02 00:17:35 crc kubenswrapper[4766]: I0102 00:17:35.864490 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" event={"ID":"85f7f29a-3282-41e0-bc15-b4e3e54f27c0","Type":"ContainerDied","Data":"70cfebd6d9fa99730aa96590e6f073f45d8fa6e428d0aa3d54c67c09644373f1"} Jan 02 00:17:36 crc kubenswrapper[4766]: I0102 00:17:36.871594 4766 generic.go:334] "Generic (PLEG): container finished" podID="206c274e-ae59-471d-b0ca-5f6f34b28706" containerID="239bdb8d4ccc1de656c19dc849d173704620f9378e6076c1a66412db9732b64f" exitCode=0 Jan 02 00:17:36 crc kubenswrapper[4766]: I0102 00:17:36.871647 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" event={"ID":"206c274e-ae59-471d-b0ca-5f6f34b28706","Type":"ContainerDied","Data":"239bdb8d4ccc1de656c19dc849d173704620f9378e6076c1a66412db9732b64f"} Jan 02 00:17:37 crc kubenswrapper[4766]: I0102 00:17:37.880424 4766 generic.go:334] "Generic (PLEG): container finished" podID="85f7f29a-3282-41e0-bc15-b4e3e54f27c0" containerID="fe0e4074fc2ae3ae8ef775de2ad008363584b5f4976de2d51d71025826a1723f" exitCode=0 Jan 02 00:17:37 crc 
kubenswrapper[4766]: I0102 00:17:37.880511 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" event={"ID":"85f7f29a-3282-41e0-bc15-b4e3e54f27c0","Type":"ContainerDied","Data":"fe0e4074fc2ae3ae8ef775de2ad008363584b5f4976de2d51d71025826a1723f"} Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.232899 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.376896 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-bundle\") pod \"206c274e-ae59-471d-b0ca-5f6f34b28706\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.376956 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-util\") pod \"206c274e-ae59-471d-b0ca-5f6f34b28706\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.377107 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rhml\" (UniqueName: \"kubernetes.io/projected/206c274e-ae59-471d-b0ca-5f6f34b28706-kube-api-access-4rhml\") pod \"206c274e-ae59-471d-b0ca-5f6f34b28706\" (UID: \"206c274e-ae59-471d-b0ca-5f6f34b28706\") " Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.377744 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-bundle" (OuterVolumeSpecName: "bundle") pod "206c274e-ae59-471d-b0ca-5f6f34b28706" (UID: "206c274e-ae59-471d-b0ca-5f6f34b28706"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.382274 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/206c274e-ae59-471d-b0ca-5f6f34b28706-kube-api-access-4rhml" (OuterVolumeSpecName: "kube-api-access-4rhml") pod "206c274e-ae59-471d-b0ca-5f6f34b28706" (UID: "206c274e-ae59-471d-b0ca-5f6f34b28706"). InnerVolumeSpecName "kube-api-access-4rhml". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.394267 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-util" (OuterVolumeSpecName: "util") pod "206c274e-ae59-471d-b0ca-5f6f34b28706" (UID: "206c274e-ae59-471d-b0ca-5f6f34b28706"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.478034 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rhml\" (UniqueName: \"kubernetes.io/projected/206c274e-ae59-471d-b0ca-5f6f34b28706-kube-api-access-4rhml\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.478063 4766 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.478075 4766 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/206c274e-ae59-471d-b0ca-5f6f34b28706-util\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.887346 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" event={"ID":"85f7f29a-3282-41e0-bc15-b4e3e54f27c0","Type":"ContainerStarted","Data":"4707a4fa86e773d2d139b58be38e7f8a86447884623ac5f5b14f12f79e79bd28"} Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.889725 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" event={"ID":"206c274e-ae59-471d-b0ca-5f6f34b28706","Type":"ContainerDied","Data":"f414db0e2825ada3e02817c0a96377a3786b28507a2f9687c3102442360514e9"} Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.889743 4766 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f414db0e2825ada3e02817c0a96377a3786b28507a2f9687c3102442360514e9" Jan 02 00:17:38 crc kubenswrapper[4766]: I0102 00:17:38.889787 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq" Jan 02 00:17:39 crc kubenswrapper[4766]: I0102 00:17:39.115416 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" podStartSLOduration=4.970211211 podStartE2EDuration="6.115399207s" podCreationTimestamp="2026-01-02 00:17:33 +0000 UTC" firstStartedPulling="2026-01-02 00:17:35.875118628 +0000 UTC m=+630.673605822" lastFinishedPulling="2026-01-02 00:17:37.020306664 +0000 UTC m=+631.818793818" observedRunningTime="2026-01-02 00:17:39.111630934 +0000 UTC m=+633.910118108" watchObservedRunningTime="2026-01-02 00:17:39.115399207 +0000 UTC m=+633.913886361" Jan 02 00:17:39 crc kubenswrapper[4766]: I0102 00:17:39.897968 4766 generic.go:334] "Generic (PLEG): container finished" podID="85f7f29a-3282-41e0-bc15-b4e3e54f27c0" containerID="4707a4fa86e773d2d139b58be38e7f8a86447884623ac5f5b14f12f79e79bd28" exitCode=0 Jan 02 00:17:39 crc kubenswrapper[4766]: I0102 00:17:39.898017 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" event={"ID":"85f7f29a-3282-41e0-bc15-b4e3e54f27c0","Type":"ContainerDied","Data":"4707a4fa86e773d2d139b58be38e7f8a86447884623ac5f5b14f12f79e79bd28"} Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.348389 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.441719 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjpcl\" (UniqueName: \"kubernetes.io/projected/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-kube-api-access-hjpcl\") pod \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\" (UID: \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.441789 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-util\") pod \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\" (UID: \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.441849 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-bundle\") pod \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\" (UID: \"85f7f29a-3282-41e0-bc15-b4e3e54f27c0\") " Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.442759 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-bundle" (OuterVolumeSpecName: "bundle") pod "85f7f29a-3282-41e0-bc15-b4e3e54f27c0" (UID: "85f7f29a-3282-41e0-bc15-b4e3e54f27c0"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.447463 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-kube-api-access-hjpcl" (OuterVolumeSpecName: "kube-api-access-hjpcl") pod "85f7f29a-3282-41e0-bc15-b4e3e54f27c0" (UID: "85f7f29a-3282-41e0-bc15-b4e3e54f27c0"). InnerVolumeSpecName "kube-api-access-hjpcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.460276 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-util" (OuterVolumeSpecName: "util") pod "85f7f29a-3282-41e0-bc15-b4e3e54f27c0" (UID: "85f7f29a-3282-41e0-bc15-b4e3e54f27c0"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.636808 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjpcl\" (UniqueName: \"kubernetes.io/projected/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-kube-api-access-hjpcl\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.636846 4766 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-util\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.636855 4766 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/85f7f29a-3282-41e0-bc15-b4e3e54f27c0-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.912088 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" event={"ID":"85f7f29a-3282-41e0-bc15-b4e3e54f27c0","Type":"ContainerDied","Data":"bac3070221e8c7bc02268025a9e5d13fa34c9d9e01eecfefba88e55c57cc0196"} Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.912123 4766 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bac3070221e8c7bc02268025a9e5d13fa34c9d9e01eecfefba88e55c57cc0196" Jan 02 00:17:41 crc kubenswrapper[4766]: I0102 00:17:41.912177 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.345369 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4"] Jan 02 00:17:42 crc kubenswrapper[4766]: E0102 00:17:42.346064 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85f7f29a-3282-41e0-bc15-b4e3e54f27c0" containerName="pull" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.346088 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="85f7f29a-3282-41e0-bc15-b4e3e54f27c0" containerName="pull" Jan 02 00:17:42 crc kubenswrapper[4766]: E0102 00:17:42.346113 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="206c274e-ae59-471d-b0ca-5f6f34b28706" containerName="util" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.346122 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="206c274e-ae59-471d-b0ca-5f6f34b28706" containerName="util" Jan 02 00:17:42 crc kubenswrapper[4766]: E0102 00:17:42.346154 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="206c274e-ae59-471d-b0ca-5f6f34b28706" containerName="extract" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.346162 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="206c274e-ae59-471d-b0ca-5f6f34b28706" containerName="extract" Jan 02 00:17:42 crc kubenswrapper[4766]: E0102 00:17:42.346174 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85f7f29a-3282-41e0-bc15-b4e3e54f27c0" containerName="extract" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.346181 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="85f7f29a-3282-41e0-bc15-b4e3e54f27c0" containerName="extract" Jan 02 00:17:42 crc kubenswrapper[4766]: E0102 00:17:42.346191 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85f7f29a-3282-41e0-bc15-b4e3e54f27c0" 
containerName="util" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.346198 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="85f7f29a-3282-41e0-bc15-b4e3e54f27c0" containerName="util" Jan 02 00:17:42 crc kubenswrapper[4766]: E0102 00:17:42.346211 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="206c274e-ae59-471d-b0ca-5f6f34b28706" containerName="pull" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.346220 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="206c274e-ae59-471d-b0ca-5f6f34b28706" containerName="pull" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.346339 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="85f7f29a-3282-41e0-bc15-b4e3e54f27c0" containerName="extract" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.346359 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="206c274e-ae59-471d-b0ca-5f6f34b28706" containerName="extract" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.347210 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.351381 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.369386 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4"] Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.428657 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvnls\" (UniqueName: \"kubernetes.io/projected/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-kube-api-access-lvnls\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.428747 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.428801 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.815721 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvnls\" (UniqueName: \"kubernetes.io/projected/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-kube-api-access-lvnls\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:17:42 crc 
kubenswrapper[4766]: I0102 00:17:42.815800 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.815852 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.816345 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.816361 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.913783 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvnls\" (UniqueName: \"kubernetes.io/projected/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-kube-api-access-lvnls\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:17:42 crc kubenswrapper[4766]: I0102 00:17:42.960985 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:17:44 crc kubenswrapper[4766]: I0102 00:17:44.120690 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4"] Jan 02 00:17:44 crc kubenswrapper[4766]: I0102 00:17:44.907185 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk"] Jan 02 00:17:44 crc kubenswrapper[4766]: I0102 00:17:44.908356 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk" Jan 02 00:17:44 crc kubenswrapper[4766]: W0102 00:17:44.910430 4766 reflector.go:561] object-"openshift-operators"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-operators": no relationship found between node 'crc' and this object Jan 02 00:17:44 crc kubenswrapper[4766]: E0102 00:17:44.910493 4766 reflector.go:158] "Unhandled Error" err="object-\"openshift-operators\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 02 00:17:44 crc kubenswrapper[4766]: W0102 00:17:44.910441 4766 reflector.go:561] object-"openshift-operators"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-operators": no relationship found between node 'crc' and this object Jan 02 00:17:44 crc kubenswrapper[4766]: W0102 00:17:44.910515 4766 reflector.go:561] object-"openshift-operators"/"obo-prometheus-operator-dockercfg-b7stt": failed to list *v1.Secret: secrets "obo-prometheus-operator-dockercfg-b7stt" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-operators": no relationship found between node 'crc' and this object Jan 02 00:17:44 crc kubenswrapper[4766]: E0102 00:17:44.910526 4766 reflector.go:158] "Unhandled Error" err="object-\"openshift-operators\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 02 00:17:44 crc kubenswrapper[4766]: E0102 00:17:44.910554 4766 reflector.go:158] "Unhandled Error" err="object-\"openshift-operators\"/\"obo-prometheus-operator-dockercfg-b7stt\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"obo-prometheus-operator-dockercfg-b7stt\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 02 00:17:44 crc kubenswrapper[4766]: I0102 00:17:44.915444 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk"] Jan 02 00:17:44 crc kubenswrapper[4766]: I0102 00:17:44.969938 4766 generic.go:334] "Generic (PLEG): container finished" podID="0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" containerID="78756df0ce9825d5df97d9d5d8a8cf879857a1d71a6edc6a9b39cfad82c4f214" exitCode=0 Jan 02 00:17:44 crc kubenswrapper[4766]: I0102 00:17:44.969984 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" event={"ID":"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b","Type":"ContainerDied","Data":"78756df0ce9825d5df97d9d5d8a8cf879857a1d71a6edc6a9b39cfad82c4f214"} Jan 
02 00:17:44 crc kubenswrapper[4766]: I0102 00:17:44.970014 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" event={"ID":"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b","Type":"ContainerStarted","Data":"b7b8db7bdf0c944f521ae009e7424c7d2022ce8c600527ddfa31b4478b224365"} Jan 02 00:17:44 crc kubenswrapper[4766]: I0102 00:17:44.985333 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mggdg\" (UniqueName: \"kubernetes.io/projected/0bae17f8-e0b5-4658-a923-279617d6d958-kube-api-access-mggdg\") pod \"obo-prometheus-operator-68bc856cb9-t8dpk\" (UID: \"0bae17f8-e0b5-4658-a923-279617d6d958\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.046708 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh"] Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.047643 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.049259 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-8mngc" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.049833 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.069068 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc"] Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.069835 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.075275 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh"] Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.081897 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc"] Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.086609 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mggdg\" (UniqueName: \"kubernetes.io/projected/0bae17f8-e0b5-4658-a923-279617d6d958-kube-api-access-mggdg\") pod \"obo-prometheus-operator-68bc856cb9-t8dpk\" (UID: \"0bae17f8-e0b5-4658-a923-279617d6d958\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.086694 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3d8010c4-a7db-48af-8230-720463cc00a3-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-l98hh\" (UID: \"3d8010c4-a7db-48af-8230-720463cc00a3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.086717 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3d8010c4-a7db-48af-8230-720463cc00a3-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-l98hh\" (UID: \"3d8010c4-a7db-48af-8230-720463cc00a3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.086777 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9847ef6e-30b8-452a-9b4f-53cb6aabe7ca-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc\" (UID: \"9847ef6e-30b8-452a-9b4f-53cb6aabe7ca\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.086796 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9847ef6e-30b8-452a-9b4f-53cb6aabe7ca-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc\" (UID: \"9847ef6e-30b8-452a-9b4f-53cb6aabe7ca\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.188083 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3d8010c4-a7db-48af-8230-720463cc00a3-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-l98hh\" (UID: \"3d8010c4-a7db-48af-8230-720463cc00a3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.188120 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/3d8010c4-a7db-48af-8230-720463cc00a3-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-l98hh\" (UID: \"3d8010c4-a7db-48af-8230-720463cc00a3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.188158 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9847ef6e-30b8-452a-9b4f-53cb6aabe7ca-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc\" (UID: \"9847ef6e-30b8-452a-9b4f-53cb6aabe7ca\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.188179 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9847ef6e-30b8-452a-9b4f-53cb6aabe7ca-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc\" (UID: \"9847ef6e-30b8-452a-9b4f-53cb6aabe7ca\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.193513 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3d8010c4-a7db-48af-8230-720463cc00a3-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-l98hh\" (UID: \"3d8010c4-a7db-48af-8230-720463cc00a3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.193864 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9847ef6e-30b8-452a-9b4f-53cb6aabe7ca-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc\" (UID: \"9847ef6e-30b8-452a-9b4f-53cb6aabe7ca\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.194162 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9847ef6e-30b8-452a-9b4f-53cb6aabe7ca-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc\" (UID: \"9847ef6e-30b8-452a-9b4f-53cb6aabe7ca\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.209046 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3d8010c4-a7db-48af-8230-720463cc00a3-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74fb98679c-l98hh\" (UID: \"3d8010c4-a7db-48af-8230-720463cc00a3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.238787 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-clrx5"] Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.239435 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-clrx5" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.242113 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-pd5nh" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.242153 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.254952 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-clrx5"] Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.289764 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xtht\" (UniqueName: \"kubernetes.io/projected/c2cc883f-2ba7-499a-b5cc-b84de8b742ba-kube-api-access-6xtht\") pod \"observability-operator-59bdc8b94-clrx5\" (UID: \"c2cc883f-2ba7-499a-b5cc-b84de8b742ba\") " pod="openshift-operators/observability-operator-59bdc8b94-clrx5" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.289912 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/c2cc883f-2ba7-499a-b5cc-b84de8b742ba-observability-operator-tls\") pod \"observability-operator-59bdc8b94-clrx5\" (UID: \"c2cc883f-2ba7-499a-b5cc-b84de8b742ba\") " pod="openshift-operators/observability-operator-59bdc8b94-clrx5" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.337598 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-qptw2"] Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.338252 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-qptw2" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.342099 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-6z695" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.354393 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-qptw2"] Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.362337 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.385661 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.393455 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/d0926af8-bde6-483d-b35a-06b9f1e39558-openshift-service-ca\") pod \"perses-operator-5bf474d74f-qptw2\" (UID: \"d0926af8-bde6-483d-b35a-06b9f1e39558\") " pod="openshift-operators/perses-operator-5bf474d74f-qptw2" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.393501 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpzgz\" (UniqueName: \"kubernetes.io/projected/d0926af8-bde6-483d-b35a-06b9f1e39558-kube-api-access-gpzgz\") pod \"perses-operator-5bf474d74f-qptw2\" (UID: \"d0926af8-bde6-483d-b35a-06b9f1e39558\") " pod="openshift-operators/perses-operator-5bf474d74f-qptw2" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.393636 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xtht\" (UniqueName: \"kubernetes.io/projected/c2cc883f-2ba7-499a-b5cc-b84de8b742ba-kube-api-access-6xtht\") pod \"observability-operator-59bdc8b94-clrx5\" (UID: \"c2cc883f-2ba7-499a-b5cc-b84de8b742ba\") " pod="openshift-operators/observability-operator-59bdc8b94-clrx5" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.393671 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/c2cc883f-2ba7-499a-b5cc-b84de8b742ba-observability-operator-tls\") pod \"observability-operator-59bdc8b94-clrx5\" (UID: \"c2cc883f-2ba7-499a-b5cc-b84de8b742ba\") " pod="openshift-operators/observability-operator-59bdc8b94-clrx5" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.397208 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/c2cc883f-2ba7-499a-b5cc-b84de8b742ba-observability-operator-tls\") pod \"observability-operator-59bdc8b94-clrx5\" (UID: \"c2cc883f-2ba7-499a-b5cc-b84de8b742ba\") " pod="openshift-operators/observability-operator-59bdc8b94-clrx5" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.494734 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/d0926af8-bde6-483d-b35a-06b9f1e39558-openshift-service-ca\") pod \"perses-operator-5bf474d74f-qptw2\" (UID: \"d0926af8-bde6-483d-b35a-06b9f1e39558\") " pod="openshift-operators/perses-operator-5bf474d74f-qptw2" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.495149 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpzgz\" (UniqueName: \"kubernetes.io/projected/d0926af8-bde6-483d-b35a-06b9f1e39558-kube-api-access-gpzgz\") pod \"perses-operator-5bf474d74f-qptw2\" (UID: \"d0926af8-bde6-483d-b35a-06b9f1e39558\") " pod="openshift-operators/perses-operator-5bf474d74f-qptw2" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.673152 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh"] Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.848766 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc"] Jan 02 
00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.869049 4766 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-b7stt" Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.974712 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" event={"ID":"9847ef6e-30b8-452a-9b4f-53cb6aabe7ca","Type":"ContainerStarted","Data":"69839f86eb2b7527d9195a369e02faa7395be60a1a5468ead0c8cc4e89e8713c"} Jan 02 00:17:45 crc kubenswrapper[4766]: I0102 00:17:45.975711 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" event={"ID":"3d8010c4-a7db-48af-8230-720463cc00a3","Type":"ContainerStarted","Data":"0ef8b65d77d8766cad91ab91d4ccac7f5dedaedb134c9d2800f37d9446d707d8"} Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.054442 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.056647 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/d0926af8-bde6-483d-b35a-06b9f1e39558-openshift-service-ca\") pod \"perses-operator-5bf474d74f-qptw2\" (UID: \"d0926af8-bde6-483d-b35a-06b9f1e39558\") " pod="openshift-operators/perses-operator-5bf474d74f-qptw2" Jan 02 00:17:46 crc kubenswrapper[4766]: E0102 00:17:46.102074 4766 projected.go:288] Couldn't get configMap openshift-operators/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Jan 02 00:17:46 crc kubenswrapper[4766]: E0102 00:17:46.102123 4766 projected.go:194] Error preparing data for projected volume kube-api-access-mggdg for pod openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk: failed to sync configmap cache: timed out waiting for the condition Jan 02 00:17:46 crc kubenswrapper[4766]: E0102 00:17:46.102189 4766 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0bae17f8-e0b5-4658-a923-279617d6d958-kube-api-access-mggdg podName:0bae17f8-e0b5-4658-a923-279617d6d958 nodeName:}" failed. No retries permitted until 2026-01-02 00:17:46.602170153 +0000 UTC m=+641.400657307 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-mggdg" (UniqueName: "kubernetes.io/projected/0bae17f8-e0b5-4658-a923-279617d6d958-kube-api-access-mggdg") pod "obo-prometheus-operator-68bc856cb9-t8dpk" (UID: "0bae17f8-e0b5-4658-a923-279617d6d958") : failed to sync configmap cache: timed out waiting for the condition Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.301992 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.314024 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xtht\" (UniqueName: \"kubernetes.io/projected/c2cc883f-2ba7-499a-b5cc-b84de8b742ba-kube-api-access-6xtht\") pod \"observability-operator-59bdc8b94-clrx5\" (UID: \"c2cc883f-2ba7-499a-b5cc-b84de8b742ba\") " pod="openshift-operators/observability-operator-59bdc8b94-clrx5" Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.314438 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpzgz\" (UniqueName: \"kubernetes.io/projected/d0926af8-bde6-483d-b35a-06b9f1e39558-kube-api-access-gpzgz\") pod \"perses-operator-5bf474d74f-qptw2\" (UID: \"d0926af8-bde6-483d-b35a-06b9f1e39558\") " pod="openshift-operators/perses-operator-5bf474d74f-qptw2" Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.455811 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-clrx5" Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.553526 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-qptw2" Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.618985 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mggdg\" (UniqueName: \"kubernetes.io/projected/0bae17f8-e0b5-4658-a923-279617d6d958-kube-api-access-mggdg\") pod \"obo-prometheus-operator-68bc856cb9-t8dpk\" (UID: \"0bae17f8-e0b5-4658-a923-279617d6d958\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk" Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.630722 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mggdg\" (UniqueName: \"kubernetes.io/projected/0bae17f8-e0b5-4658-a923-279617d6d958-kube-api-access-mggdg\") pod \"obo-prometheus-operator-68bc856cb9-t8dpk\" (UID: \"0bae17f8-e0b5-4658-a923-279617d6d958\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk" Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.735088 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk" Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.777830 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-clrx5"] Jan 02 00:17:46 crc kubenswrapper[4766]: W0102 00:17:46.799362 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2cc883f_2ba7_499a_b5cc_b84de8b742ba.slice/crio-521d62b8847f44c8c25287ab7cad52aaee91f2a427252f44c15d0cf739b70a26 WatchSource:0}: Error finding container 521d62b8847f44c8c25287ab7cad52aaee91f2a427252f44c15d0cf739b70a26: Status 404 returned error can't find the container with id 521d62b8847f44c8c25287ab7cad52aaee91f2a427252f44c15d0cf739b70a26 Jan 02 00:17:46 crc kubenswrapper[4766]: I0102 00:17:46.995060 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-clrx5" event={"ID":"c2cc883f-2ba7-499a-b5cc-b84de8b742ba","Type":"ContainerStarted","Data":"521d62b8847f44c8c25287ab7cad52aaee91f2a427252f44c15d0cf739b70a26"} Jan 02 00:17:47 crc kubenswrapper[4766]: I0102 00:17:47.041723 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk"] Jan 02 00:17:47 crc kubenswrapper[4766]: W0102 00:17:47.048370 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0bae17f8_e0b5_4658_a923_279617d6d958.slice/crio-ffbb6c72a36cd84a815d063b4de42330b391d98fcb36aeab233210d6ba820d8d WatchSource:0}: Error finding container ffbb6c72a36cd84a815d063b4de42330b391d98fcb36aeab233210d6ba820d8d: Status 404 returned error can't find the container with id ffbb6c72a36cd84a815d063b4de42330b391d98fcb36aeab233210d6ba820d8d Jan 02 00:17:47 crc kubenswrapper[4766]: I0102 00:17:47.175347 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-qptw2"] Jan 02 00:17:47 crc kubenswrapper[4766]: W0102 00:17:47.179758 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0926af8_bde6_483d_b35a_06b9f1e39558.slice/crio-bd38958894fb651f78de49b643255dd1041ee4d2388d1ade4183cffda72d8525 WatchSource:0}: Error finding container bd38958894fb651f78de49b643255dd1041ee4d2388d1ade4183cffda72d8525: Status 404 returned error can't find the container with id bd38958894fb651f78de49b643255dd1041ee4d2388d1ade4183cffda72d8525 Jan 02 00:17:48 crc kubenswrapper[4766]: I0102 00:17:48.009644 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk" event={"ID":"0bae17f8-e0b5-4658-a923-279617d6d958","Type":"ContainerStarted","Data":"ffbb6c72a36cd84a815d063b4de42330b391d98fcb36aeab233210d6ba820d8d"} Jan 02 00:17:48 crc kubenswrapper[4766]: I0102 00:17:48.012719 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-qptw2" event={"ID":"d0926af8-bde6-483d-b35a-06b9f1e39558","Type":"ContainerStarted","Data":"bd38958894fb651f78de49b643255dd1041ee4d2388d1ade4183cffda72d8525"} Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.170980 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elastic-operator-7fdd459f5-l8vpz"] Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.171998 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.178253 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"openshift-service-ca.crt" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.178302 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-dockercfg-dfnc5" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.179786 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-service-cert" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.179942 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"kube-root-ca.crt" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.187396 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-7fdd459f5-l8vpz"] Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.302108 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44ef2541-95e7-471c-a509-c2c3bb4f9141-apiservice-cert\") pod \"elastic-operator-7fdd459f5-l8vpz\" (UID: \"44ef2541-95e7-471c-a509-c2c3bb4f9141\") " pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.302195 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfngc\" (UniqueName: \"kubernetes.io/projected/44ef2541-95e7-471c-a509-c2c3bb4f9141-kube-api-access-dfngc\") pod \"elastic-operator-7fdd459f5-l8vpz\" (UID: \"44ef2541-95e7-471c-a509-c2c3bb4f9141\") " pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.302251 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44ef2541-95e7-471c-a509-c2c3bb4f9141-webhook-cert\") pod \"elastic-operator-7fdd459f5-l8vpz\" (UID: \"44ef2541-95e7-471c-a509-c2c3bb4f9141\") " pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.406212 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44ef2541-95e7-471c-a509-c2c3bb4f9141-apiservice-cert\") pod \"elastic-operator-7fdd459f5-l8vpz\" (UID: \"44ef2541-95e7-471c-a509-c2c3bb4f9141\") " pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.406292 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfngc\" (UniqueName: \"kubernetes.io/projected/44ef2541-95e7-471c-a509-c2c3bb4f9141-kube-api-access-dfngc\") pod \"elastic-operator-7fdd459f5-l8vpz\" (UID: \"44ef2541-95e7-471c-a509-c2c3bb4f9141\") " pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.406332 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44ef2541-95e7-471c-a509-c2c3bb4f9141-webhook-cert\") pod \"elastic-operator-7fdd459f5-l8vpz\" (UID: \"44ef2541-95e7-471c-a509-c2c3bb4f9141\") " pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.418868 4766 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44ef2541-95e7-471c-a509-c2c3bb4f9141-apiservice-cert\") pod \"elastic-operator-7fdd459f5-l8vpz\" (UID: \"44ef2541-95e7-471c-a509-c2c3bb4f9141\") " pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.421449 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44ef2541-95e7-471c-a509-c2c3bb4f9141-webhook-cert\") pod \"elastic-operator-7fdd459f5-l8vpz\" (UID: \"44ef2541-95e7-471c-a509-c2c3bb4f9141\") " pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.423176 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfngc\" (UniqueName: \"kubernetes.io/projected/44ef2541-95e7-471c-a509-c2c3bb4f9141-kube-api-access-dfngc\") pod \"elastic-operator-7fdd459f5-l8vpz\" (UID: \"44ef2541-95e7-471c-a509-c2c3bb4f9141\") " pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" Jan 02 00:17:50 crc kubenswrapper[4766]: I0102 00:17:50.491807 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" Jan 02 00:17:55 crc kubenswrapper[4766]: I0102 00:17:55.231253 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-dvfds"] Jan 02 00:17:55 crc kubenswrapper[4766]: I0102 00:17:55.232487 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-dvfds" Jan 02 00:17:55 crc kubenswrapper[4766]: I0102 00:17:55.242459 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"interconnect-operator-dockercfg-77r74" Jan 02 00:17:55 crc kubenswrapper[4766]: I0102 00:17:55.252590 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-dvfds"] Jan 02 00:17:55 crc kubenswrapper[4766]: I0102 00:17:55.333886 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4nrb\" (UniqueName: \"kubernetes.io/projected/f3106de7-eb28-489f-90df-8efab3a6d338-kube-api-access-q4nrb\") pod \"interconnect-operator-5bb49f789d-dvfds\" (UID: \"f3106de7-eb28-489f-90df-8efab3a6d338\") " pod="service-telemetry/interconnect-operator-5bb49f789d-dvfds" Jan 02 00:17:55 crc kubenswrapper[4766]: I0102 00:17:55.437645 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4nrb\" (UniqueName: \"kubernetes.io/projected/f3106de7-eb28-489f-90df-8efab3a6d338-kube-api-access-q4nrb\") pod \"interconnect-operator-5bb49f789d-dvfds\" (UID: \"f3106de7-eb28-489f-90df-8efab3a6d338\") " pod="service-telemetry/interconnect-operator-5bb49f789d-dvfds" Jan 02 00:17:55 crc kubenswrapper[4766]: I0102 00:17:55.459726 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4nrb\" (UniqueName: \"kubernetes.io/projected/f3106de7-eb28-489f-90df-8efab3a6d338-kube-api-access-q4nrb\") pod \"interconnect-operator-5bb49f789d-dvfds\" (UID: \"f3106de7-eb28-489f-90df-8efab3a6d338\") " pod="service-telemetry/interconnect-operator-5bb49f789d-dvfds" Jan 02 00:17:55 crc kubenswrapper[4766]: I0102 00:17:55.553874 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-dvfds" Jan 02 00:18:03 crc kubenswrapper[4766]: E0102 00:18:03.278080 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:2ecf763b02048d2cf4c17967a7b2cacc7afd6af0e963a39579d876f8f4170e3c" Jan 02 00:18:03 crc kubenswrapper[4766]: E0102 00:18:03.279815 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:2ecf763b02048d2cf4c17967a7b2cacc7afd6af0e963a39579d876f8f4170e3c,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) 
--openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:dc62889b883f597de91b5389cc52c84c607247d49a807693be2f688e4703dfc3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:1b555e21bba7c609111ace4380382a696d9aceeb6e9816bf9023b8f689b6c741,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:a223bab813b82d698992490bbb60927f6288a83ba52d539836c250e1471f6d34,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:e797cdb47beef40b04da7b6d645bca3dc32e6247003c45b56b38efd9e13bf01c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:093d2731ac848ed5fd57356b155a19d3bf7b8db96d95b09c5d0095e143f7254f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:7d662a120305e2528acc7e9142b770b5b6a7f4932ddfcadfa4ac953935124895,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:75465aabb0aa427a5c531a8fcde463f6d119afbcc618ebcbf6b7ee9bc8aad160,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:dc18c8d6a4a9a0a574a57cc5082c8a9b26023bd6d69b9732892d584c1dfe5070,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:369729978cecdc13c99ef3d179f8eb8a450a4a0cb70b63c27a55a15d1710ba27,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:d8c7a61d147f62b204d5c5f16864386025393453c9a81ea327bbd25d7765d611,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:b4a6eb1cc118a4334b424614959d8b7f361ddd779b3a72690ca49b0a3f26d9b8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:21d4fff670893ba4b7fbc528cd49f8b71c8281cede9ef84f0697065bb6a7fc50,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:12d9dbe297a1c3b9df671f21156992082bc483887d851fafe76e5d17321ff474,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:e65c37f04f6d76a0cbfe05edb3cddf6a8f14f859ee35cf3aebea8fcb991d2c19,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:48e4e178c6eeaa9d5dd77a591
c185a311b4b4a5caadb7199d48463123e31dc9e,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6xtht,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-59bdc8b94-clrx5_openshift-operators(c2cc883f-2ba7-499a-b5cc-b84de8b742ba): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 02 00:18:03 crc kubenswrapper[4766]: E0102 00:18:03.281529 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-59bdc8b94-clrx5" podUID="c2cc883f-2ba7-499a-b5cc-b84de8b742ba" Jan 02 00:18:03 crc kubenswrapper[4766]: E0102 00:18:03.873249 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea" Jan 02 00:18:03 crc kubenswrapper[4766]: E0102 00:18:03.873564 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt 
--web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc_openshift-operators(9847ef6e-30b8-452a-9b4f-53cb6aabe7ca): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 02 00:18:03 crc kubenswrapper[4766]: E0102 00:18:03.875033 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" podUID="9847ef6e-30b8-452a-9b4f-53cb6aabe7ca" Jan 02 00:18:03 crc kubenswrapper[4766]: E0102 00:18:03.891370 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea" Jan 02 00:18:03 crc kubenswrapper[4766]: E0102 00:18:03.891624 4766 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-74fb98679c-l98hh_openshift-operators(3d8010c4-a7db-48af-8230-720463cc00a3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 02 00:18:03 crc kubenswrapper[4766]: E0102 00:18:03.893065 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" podUID="3d8010c4-a7db-48af-8230-720463cc00a3" Jan 02 00:18:04 crc kubenswrapper[4766]: E0102 00:18:04.258429 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" podUID="3d8010c4-a7db-48af-8230-720463cc00a3" Jan 02 00:18:04 crc kubenswrapper[4766]: E0102 00:18:04.258657 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" podUID="9847ef6e-30b8-452a-9b4f-53cb6aabe7ca" Jan 02 00:18:04 crc kubenswrapper[4766]: E0102 00:18:04.259173 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:2ecf763b02048d2cf4c17967a7b2cacc7afd6af0e963a39579d876f8f4170e3c\\\"\"" pod="openshift-operators/observability-operator-59bdc8b94-clrx5" podUID="c2cc883f-2ba7-499a-b5cc-b84de8b742ba" Jan 02 00:18:05 crc kubenswrapper[4766]: I0102 00:18:05.773664 4766 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-7fdd459f5-l8vpz"] Jan 02 00:18:05 crc kubenswrapper[4766]: I0102 00:18:05.827592 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-dvfds"] Jan 02 00:18:06 crc kubenswrapper[4766]: I0102 00:18:06.268363 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-dvfds" event={"ID":"f3106de7-eb28-489f-90df-8efab3a6d338","Type":"ContainerStarted","Data":"83d4df4bd65eac6ce6fea25b65f6d97b25bc960830bd787e744d09620e6249e7"} Jan 02 00:18:06 crc kubenswrapper[4766]: I0102 00:18:06.270672 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" event={"ID":"44ef2541-95e7-471c-a509-c2c3bb4f9141","Type":"ContainerStarted","Data":"e69a555b1b3662690a965b047786e8861bc7d065b3fd04cfa56a3c17ff57ffe7"} Jan 02 00:18:06 crc kubenswrapper[4766]: I0102 00:18:06.273066 4766 generic.go:334] "Generic (PLEG): container finished" podID="0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" containerID="68d5342642e211efaf5921387e5af6bfb7787dae1f0d6ea4e818571b4702d754" exitCode=0 Jan 02 00:18:06 crc kubenswrapper[4766]: I0102 00:18:06.273685 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" event={"ID":"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b","Type":"ContainerDied","Data":"68d5342642e211efaf5921387e5af6bfb7787dae1f0d6ea4e818571b4702d754"} Jan 02 00:18:06 crc kubenswrapper[4766]: I0102 00:18:06.276323 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk" event={"ID":"0bae17f8-e0b5-4658-a923-279617d6d958","Type":"ContainerStarted","Data":"4a1a22bece4c91492eedd0ba78bd042bc69c2f97110add76d263197645672037"} Jan 02 00:18:06 crc kubenswrapper[4766]: I0102 00:18:06.280927 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-qptw2" event={"ID":"d0926af8-bde6-483d-b35a-06b9f1e39558","Type":"ContainerStarted","Data":"09b3701af96cb0c5d36a352e7e171cdfbfd8ea2074b78ed9849ab3a115667b97"} Jan 02 00:18:06 crc kubenswrapper[4766]: I0102 00:18:06.281060 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-qptw2" Jan 02 00:18:06 crc kubenswrapper[4766]: I0102 00:18:06.323603 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-qptw2" podStartSLOduration=3.078355289 podStartE2EDuration="21.323584327s" podCreationTimestamp="2026-01-02 00:17:45 +0000 UTC" firstStartedPulling="2026-01-02 00:17:47.183000966 +0000 UTC m=+641.981488110" lastFinishedPulling="2026-01-02 00:18:05.428229994 +0000 UTC m=+660.226717148" observedRunningTime="2026-01-02 00:18:06.32261972 +0000 UTC m=+661.121106874" watchObservedRunningTime="2026-01-02 00:18:06.323584327 +0000 UTC m=+661.122071491" Jan 02 00:18:06 crc kubenswrapper[4766]: I0102 00:18:06.369664 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-t8dpk" podStartSLOduration=3.970596407 podStartE2EDuration="22.36964712s" podCreationTimestamp="2026-01-02 00:17:44 +0000 UTC" firstStartedPulling="2026-01-02 00:17:47.052366678 +0000 UTC m=+641.850853832" lastFinishedPulling="2026-01-02 00:18:05.451417391 +0000 UTC m=+660.249904545" 
observedRunningTime="2026-01-02 00:18:06.357360233 +0000 UTC m=+661.155847387" watchObservedRunningTime="2026-01-02 00:18:06.36964712 +0000 UTC m=+661.168134274" Jan 02 00:18:07 crc kubenswrapper[4766]: I0102 00:18:07.289548 4766 generic.go:334] "Generic (PLEG): container finished" podID="0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" containerID="5117579c25ef8e8128529375e03c6dcf263008ad0896a168f8a59f43de929fd8" exitCode=0 Jan 02 00:18:07 crc kubenswrapper[4766]: I0102 00:18:07.290654 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" event={"ID":"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b","Type":"ContainerDied","Data":"5117579c25ef8e8128529375e03c6dcf263008ad0896a168f8a59f43de929fd8"} Jan 02 00:18:08 crc kubenswrapper[4766]: I0102 00:18:08.635970 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:18:08 crc kubenswrapper[4766]: I0102 00:18:08.770829 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-bundle\") pod \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " Jan 02 00:18:08 crc kubenswrapper[4766]: I0102 00:18:08.770882 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-util\") pod \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " Jan 02 00:18:08 crc kubenswrapper[4766]: I0102 00:18:08.770916 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvnls\" (UniqueName: \"kubernetes.io/projected/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-kube-api-access-lvnls\") pod \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\" (UID: \"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b\") " Jan 02 00:18:08 crc kubenswrapper[4766]: I0102 00:18:08.772790 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-bundle" (OuterVolumeSpecName: "bundle") pod "0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" (UID: "0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:18:08 crc kubenswrapper[4766]: I0102 00:18:08.798990 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-util" (OuterVolumeSpecName: "util") pod "0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" (UID: "0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:18:08 crc kubenswrapper[4766]: I0102 00:18:08.799455 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-kube-api-access-lvnls" (OuterVolumeSpecName: "kube-api-access-lvnls") pod "0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" (UID: "0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b"). InnerVolumeSpecName "kube-api-access-lvnls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:18:08 crc kubenswrapper[4766]: I0102 00:18:08.872391 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvnls\" (UniqueName: \"kubernetes.io/projected/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-kube-api-access-lvnls\") on node \"crc\" DevicePath \"\"" Jan 02 00:18:08 crc kubenswrapper[4766]: I0102 00:18:08.872657 4766 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-bundle\") on node \"crc\" DevicePath \"\"" Jan 02 00:18:08 crc kubenswrapper[4766]: I0102 00:18:08.872666 4766 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b-util\") on node \"crc\" DevicePath \"\"" Jan 02 00:18:09 crc kubenswrapper[4766]: I0102 00:18:09.315067 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" event={"ID":"0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b","Type":"ContainerDied","Data":"b7b8db7bdf0c944f521ae009e7424c7d2022ce8c600527ddfa31b4478b224365"} Jan 02 00:18:09 crc kubenswrapper[4766]: I0102 00:18:09.315103 4766 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7b8db7bdf0c944f521ae009e7424c7d2022ce8c600527ddfa31b4478b224365" Jan 02 00:18:09 crc kubenswrapper[4766]: I0102 00:18:09.315124 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4" Jan 02 00:18:10 crc kubenswrapper[4766]: I0102 00:18:10.322821 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" event={"ID":"44ef2541-95e7-471c-a509-c2c3bb4f9141","Type":"ContainerStarted","Data":"4c9b8fe4be95787a030ffa817d0edf7bf802d7e3a90443aa1373605ddfd3aaec"} Jan 02 00:18:10 crc kubenswrapper[4766]: I0102 00:18:10.340715 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elastic-operator-7fdd459f5-l8vpz" podStartSLOduration=16.001223833 podStartE2EDuration="20.340697608s" podCreationTimestamp="2026-01-02 00:17:50 +0000 UTC" firstStartedPulling="2026-01-02 00:18:05.78602827 +0000 UTC m=+660.584515424" lastFinishedPulling="2026-01-02 00:18:10.125502045 +0000 UTC m=+664.923989199" observedRunningTime="2026-01-02 00:18:10.337466739 +0000 UTC m=+665.135953893" watchObservedRunningTime="2026-01-02 00:18:10.340697608 +0000 UTC m=+665.139184762" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.295076 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 02 00:18:11 crc kubenswrapper[4766]: E0102 00:18:11.296002 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" containerName="util" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.296033 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" containerName="util" Jan 02 00:18:11 crc kubenswrapper[4766]: E0102 00:18:11.296051 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" containerName="extract" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.296061 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" containerName="extract" Jan 02 00:18:11 
crc kubenswrapper[4766]: E0102 00:18:11.296092 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" containerName="pull" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.296101 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" containerName="pull" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.296290 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b" containerName="extract" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.297417 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.301077 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-remote-ca" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.301127 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-internal-users" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.301212 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-config" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.301309 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-http-certs-internal" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.302366 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-scripts" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.302365 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-dockercfg-zvq8s" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.304940 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-xpack-file-realm" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.304940 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-unicast-hosts" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.305876 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-transport-certs" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.312302 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.332173 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.332253 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/79a8a59e-c756-4178-af3b-4e9cfb505181-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.332277 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.332770 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.332798 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.332814 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.332861 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.333201 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.333236 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.333257 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.333275 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.333300 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.333322 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.333340 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.333359 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435231 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435285 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435311 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435339 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435367 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435399 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435415 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435430 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435460 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435487 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/79a8a59e-c756-4178-af3b-4e9cfb505181-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435507 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435527 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-unicast-hosts\") pod 
\"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435548 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435564 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435588 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435823 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435935 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.435971 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.436114 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.436366 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 
00:18:11.436659 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.436794 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/79a8a59e-c756-4178-af3b-4e9cfb505181-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.437937 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.443995 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.445813 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.448328 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.448484 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.458412 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/79a8a59e-c756-4178-af3b-4e9cfb505181-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.458651 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-remote-certificate-authorities\") pod 
\"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.464121 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/79a8a59e-c756-4178-af3b-4e9cfb505181-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"79a8a59e-c756-4178-af3b-4e9cfb505181\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:11 crc kubenswrapper[4766]: I0102 00:18:11.616158 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:16 crc kubenswrapper[4766]: I0102 00:18:16.556725 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-qptw2" Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.263479 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 02 00:18:19 crc kubenswrapper[4766]: W0102 00:18:19.266143 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod79a8a59e_c756_4178_af3b_4e9cfb505181.slice/crio-cbb161ac158a956d6cd22969a25cfc5676d8991d0ffd4932be45282e090600bf WatchSource:0}: Error finding container cbb161ac158a956d6cd22969a25cfc5676d8991d0ffd4932be45282e090600bf: Status 404 returned error can't find the container with id cbb161ac158a956d6cd22969a25cfc5676d8991d0ffd4932be45282e090600bf Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.444906 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-dvfds" event={"ID":"f3106de7-eb28-489f-90df-8efab3a6d338","Type":"ContainerStarted","Data":"b7266dee61d3c5af10aac99070d57b902ecd276282915c71abd2e1f73bdc1557"} Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.446303 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-clrx5" event={"ID":"c2cc883f-2ba7-499a-b5cc-b84de8b742ba","Type":"ContainerStarted","Data":"e766bbafddfc14a19e17a4fe3e72b523843ba48711ae57067f47987d194ae5b2"} Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.446595 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-clrx5" Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.448166 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" event={"ID":"9847ef6e-30b8-452a-9b4f-53cb6aabe7ca","Type":"ContainerStarted","Data":"9aeeb0e360ef1e0cb2e4d8592c321532a3611586f549dba75a1ea7a4295be80d"} Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.448705 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-clrx5" Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.449180 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"79a8a59e-c756-4178-af3b-4e9cfb505181","Type":"ContainerStarted","Data":"cbb161ac158a956d6cd22969a25cfc5676d8991d0ffd4932be45282e090600bf"} Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.450330 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" event={"ID":"3d8010c4-a7db-48af-8230-720463cc00a3","Type":"ContainerStarted","Data":"8988e682df4c7695f15008d768da54b9dbe040c1a0b711e05279e652fd6632d3"} Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.461576 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/interconnect-operator-5bb49f789d-dvfds" podStartSLOduration=11.569594126 podStartE2EDuration="24.461561603s" podCreationTimestamp="2026-01-02 00:17:55 +0000 UTC" firstStartedPulling="2026-01-02 00:18:05.834667194 +0000 UTC m=+660.633154348" lastFinishedPulling="2026-01-02 00:18:18.726634671 +0000 UTC m=+673.525121825" observedRunningTime="2026-01-02 00:18:19.457571793 +0000 UTC m=+674.256058937" watchObservedRunningTime="2026-01-02 00:18:19.461561603 +0000 UTC m=+674.260048757" Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.491635 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-l98hh" podStartSLOduration=1.452540111 podStartE2EDuration="34.491612227s" podCreationTimestamp="2026-01-02 00:17:45 +0000 UTC" firstStartedPulling="2026-01-02 00:17:45.709381504 +0000 UTC m=+640.507868648" lastFinishedPulling="2026-01-02 00:18:18.7484536 +0000 UTC m=+673.546940764" observedRunningTime="2026-01-02 00:18:19.486024253 +0000 UTC m=+674.284511417" watchObservedRunningTime="2026-01-02 00:18:19.491612227 +0000 UTC m=+674.290099401" Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.615318 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-clrx5" podStartSLOduration=2.772626383 podStartE2EDuration="34.61529846s" podCreationTimestamp="2026-01-02 00:17:45 +0000 UTC" firstStartedPulling="2026-01-02 00:17:46.80542538 +0000 UTC m=+641.603912534" lastFinishedPulling="2026-01-02 00:18:18.648097447 +0000 UTC m=+673.446584611" observedRunningTime="2026-01-02 00:18:19.611789444 +0000 UTC m=+674.410276598" watchObservedRunningTime="2026-01-02 00:18:19.61529846 +0000 UTC m=+674.413785614" Jan 02 00:18:19 crc kubenswrapper[4766]: I0102 00:18:19.617032 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc" podStartSLOduration=1.7233562519999999 podStartE2EDuration="34.617025658s" podCreationTimestamp="2026-01-02 00:17:45 +0000 UTC" firstStartedPulling="2026-01-02 00:17:45.855888594 +0000 UTC m=+640.654375748" lastFinishedPulling="2026-01-02 00:18:18.749558 +0000 UTC m=+673.548045154" observedRunningTime="2026-01-02 00:18:19.522402181 +0000 UTC m=+674.320889335" watchObservedRunningTime="2026-01-02 00:18:19.617025658 +0000 UTC m=+674.415512812" Jan 02 00:18:30 crc kubenswrapper[4766]: I0102 00:18:30.733519 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h"] Jan 02 00:18:30 crc kubenswrapper[4766]: I0102 00:18:30.735662 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h" Jan 02 00:18:30 crc kubenswrapper[4766]: I0102 00:18:30.738937 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Jan 02 00:18:30 crc kubenswrapper[4766]: I0102 00:18:30.739226 4766 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-bkm6t" Jan 02 00:18:30 crc kubenswrapper[4766]: I0102 00:18:30.739371 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Jan 02 00:18:30 crc kubenswrapper[4766]: I0102 00:18:30.896132 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/ee59214a-8fc5-430d-b5e9-ebb3dfa7406e-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-vp25h\" (UID: \"ee59214a-8fc5-430d-b5e9-ebb3dfa7406e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h" Jan 02 00:18:30 crc kubenswrapper[4766]: I0102 00:18:30.896217 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvn7d\" (UniqueName: \"kubernetes.io/projected/ee59214a-8fc5-430d-b5e9-ebb3dfa7406e-kube-api-access-nvn7d\") pod \"cert-manager-operator-controller-manager-5446d6888b-vp25h\" (UID: \"ee59214a-8fc5-430d-b5e9-ebb3dfa7406e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h" Jan 02 00:18:30 crc kubenswrapper[4766]: I0102 00:18:30.916948 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h"] Jan 02 00:18:30 crc kubenswrapper[4766]: I0102 00:18:30.997900 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/ee59214a-8fc5-430d-b5e9-ebb3dfa7406e-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-vp25h\" (UID: \"ee59214a-8fc5-430d-b5e9-ebb3dfa7406e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h" Jan 02 00:18:30 crc kubenswrapper[4766]: I0102 00:18:30.997984 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvn7d\" (UniqueName: \"kubernetes.io/projected/ee59214a-8fc5-430d-b5e9-ebb3dfa7406e-kube-api-access-nvn7d\") pod \"cert-manager-operator-controller-manager-5446d6888b-vp25h\" (UID: \"ee59214a-8fc5-430d-b5e9-ebb3dfa7406e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h" Jan 02 00:18:30 crc kubenswrapper[4766]: I0102 00:18:30.998499 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/ee59214a-8fc5-430d-b5e9-ebb3dfa7406e-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-vp25h\" (UID: \"ee59214a-8fc5-430d-b5e9-ebb3dfa7406e\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h" Jan 02 00:18:31 crc kubenswrapper[4766]: I0102 00:18:31.027192 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvn7d\" (UniqueName: \"kubernetes.io/projected/ee59214a-8fc5-430d-b5e9-ebb3dfa7406e-kube-api-access-nvn7d\") pod \"cert-manager-operator-controller-manager-5446d6888b-vp25h\" (UID: \"ee59214a-8fc5-430d-b5e9-ebb3dfa7406e\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h" Jan 02 00:18:31 crc kubenswrapper[4766]: I0102 00:18:31.055403 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h" Jan 02 00:18:38 crc kubenswrapper[4766]: I0102 00:18:38.191234 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h"] Jan 02 00:18:38 crc kubenswrapper[4766]: W0102 00:18:38.192011 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee59214a_8fc5_430d_b5e9_ebb3dfa7406e.slice/crio-923a49b6b0a27315a7d90df9eb02daf07bb1ae40124326345337e53dd25ad1aa WatchSource:0}: Error finding container 923a49b6b0a27315a7d90df9eb02daf07bb1ae40124326345337e53dd25ad1aa: Status 404 returned error can't find the container with id 923a49b6b0a27315a7d90df9eb02daf07bb1ae40124326345337e53dd25ad1aa Jan 02 00:18:38 crc kubenswrapper[4766]: I0102 00:18:38.828476 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"79a8a59e-c756-4178-af3b-4e9cfb505181","Type":"ContainerStarted","Data":"9a18bbe1cc6025cf3b64a9c7b4a8cb1a6448b2dba34fae59dc980bf50bdbd921"} Jan 02 00:18:38 crc kubenswrapper[4766]: I0102 00:18:38.830440 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h" event={"ID":"ee59214a-8fc5-430d-b5e9-ebb3dfa7406e","Type":"ContainerStarted","Data":"923a49b6b0a27315a7d90df9eb02daf07bb1ae40124326345337e53dd25ad1aa"} Jan 02 00:18:38 crc kubenswrapper[4766]: I0102 00:18:38.995369 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 02 00:18:39 crc kubenswrapper[4766]: I0102 00:18:39.161151 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 02 00:18:40 crc kubenswrapper[4766]: I0102 00:18:40.843990 4766 generic.go:334] "Generic (PLEG): container finished" podID="79a8a59e-c756-4178-af3b-4e9cfb505181" containerID="9a18bbe1cc6025cf3b64a9c7b4a8cb1a6448b2dba34fae59dc980bf50bdbd921" exitCode=0 Jan 02 00:18:40 crc kubenswrapper[4766]: I0102 00:18:40.844026 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"79a8a59e-c756-4178-af3b-4e9cfb505181","Type":"ContainerDied","Data":"9a18bbe1cc6025cf3b64a9c7b4a8cb1a6448b2dba34fae59dc980bf50bdbd921"} Jan 02 00:18:41 crc kubenswrapper[4766]: I0102 00:18:41.852427 4766 generic.go:334] "Generic (PLEG): container finished" podID="79a8a59e-c756-4178-af3b-4e9cfb505181" containerID="e2f2aef4812bbefe447c887d7ae78785f8db4e1a35ae3ca12f58efdd91db81b7" exitCode=0 Jan 02 00:18:41 crc kubenswrapper[4766]: I0102 00:18:41.852718 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"79a8a59e-c756-4178-af3b-4e9cfb505181","Type":"ContainerDied","Data":"e2f2aef4812bbefe447c887d7ae78785f8db4e1a35ae3ca12f58efdd91db81b7"} Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.066371 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.069789 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.072199 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-global-ca" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.072337 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-zc2kc" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.072457 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-ca" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.072470 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-sys-config" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.080687 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.131475 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.131914 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.131981 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.132014 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.132060 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.132077 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: 
\"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.132096 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.132180 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.132266 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.132285 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.132341 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.132358 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-995xg\" (UniqueName: \"kubernetes.io/projected/cc801b7d-f015-461e-8a74-47efbf9e4a5e-kube-api-access-995xg\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.233988 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-995xg\" (UniqueName: \"kubernetes.io/projected/cc801b7d-f015-461e-8a74-47efbf9e4a5e-kube-api-access-995xg\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.234273 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " 
pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.234453 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.234603 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.234747 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.234871 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.234799 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.235031 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.234997 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.235117 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.235143 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.235196 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.235208 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.235272 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.235347 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.235265 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.235423 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.235695 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.236105 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " 
pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.236558 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.237505 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.250546 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.250556 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.253829 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-995xg\" (UniqueName: \"kubernetes.io/projected/cc801b7d-f015-461e-8a74-47efbf9e4a5e-kube-api-access-995xg\") pod \"service-telemetry-operator-1-build\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:45 crc kubenswrapper[4766]: I0102 00:18:45.387142 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:18:47 crc kubenswrapper[4766]: I0102 00:18:47.519065 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 02 00:18:48 crc kubenswrapper[4766]: I0102 00:18:48.056055 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"cc801b7d-f015-461e-8a74-47efbf9e4a5e","Type":"ContainerStarted","Data":"75ee474755c7a75631672b281a4f14306d4b90695154020e5266d253dd49adbd"} Jan 02 00:18:48 crc kubenswrapper[4766]: I0102 00:18:48.058438 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"79a8a59e-c756-4178-af3b-4e9cfb505181","Type":"ContainerStarted","Data":"f0a1188b547bfad60e0e5f3f452acdc228204a3174e2920903cd491b564477a1"} Jan 02 00:18:48 crc kubenswrapper[4766]: I0102 00:18:48.058591 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:18:48 crc kubenswrapper[4766]: I0102 00:18:48.060275 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h" event={"ID":"ee59214a-8fc5-430d-b5e9-ebb3dfa7406e","Type":"ContainerStarted","Data":"395926adacc7af2f7d940e7ff95427941a3952d8108838b01c33c1d08ea73491"} Jan 02 00:18:48 crc kubenswrapper[4766]: I0102 00:18:48.088750 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elasticsearch-es-default-0" podStartSLOduration=18.34769592 podStartE2EDuration="37.088730743s" podCreationTimestamp="2026-01-02 00:18:11 +0000 UTC" firstStartedPulling="2026-01-02 00:18:19.268041674 +0000 UTC m=+674.066528828" lastFinishedPulling="2026-01-02 00:18:38.009076507 +0000 UTC m=+692.807563651" observedRunningTime="2026-01-02 00:18:48.086871221 +0000 UTC m=+702.885358375" watchObservedRunningTime="2026-01-02 00:18:48.088730743 +0000 UTC m=+702.887217897" Jan 02 00:18:48 crc kubenswrapper[4766]: I0102 00:18:48.107970 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vp25h" podStartSLOduration=8.931587104 podStartE2EDuration="18.107949449s" podCreationTimestamp="2026-01-02 00:18:30 +0000 UTC" firstStartedPulling="2026-01-02 00:18:38.194874504 +0000 UTC m=+692.993361658" lastFinishedPulling="2026-01-02 00:18:47.371236849 +0000 UTC m=+702.169724003" observedRunningTime="2026-01-02 00:18:48.105304387 +0000 UTC m=+702.903791561" watchObservedRunningTime="2026-01-02 00:18:48.107949449 +0000 UTC m=+702.906436613" Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.150562 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-59nnv"] Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.151626 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-59nnv" Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.158485 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.158678 4766 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-qmrw8" Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.163060 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-59nnv"] Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.168508 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.293373 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gz6dd\" (UniqueName: \"kubernetes.io/projected/16413439-0e97-4c36-996e-5b24f45244c3-kube-api-access-gz6dd\") pod \"cert-manager-cainjector-855d9ccff4-59nnv\" (UID: \"16413439-0e97-4c36-996e-5b24f45244c3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-59nnv" Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.293491 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/16413439-0e97-4c36-996e-5b24f45244c3-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-59nnv\" (UID: \"16413439-0e97-4c36-996e-5b24f45244c3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-59nnv" Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.394397 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/16413439-0e97-4c36-996e-5b24f45244c3-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-59nnv\" (UID: \"16413439-0e97-4c36-996e-5b24f45244c3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-59nnv" Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.394543 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gz6dd\" (UniqueName: \"kubernetes.io/projected/16413439-0e97-4c36-996e-5b24f45244c3-kube-api-access-gz6dd\") pod \"cert-manager-cainjector-855d9ccff4-59nnv\" (UID: \"16413439-0e97-4c36-996e-5b24f45244c3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-59nnv" Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.413942 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/16413439-0e97-4c36-996e-5b24f45244c3-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-59nnv\" (UID: \"16413439-0e97-4c36-996e-5b24f45244c3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-59nnv" Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.414953 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gz6dd\" (UniqueName: \"kubernetes.io/projected/16413439-0e97-4c36-996e-5b24f45244c3-kube-api-access-gz6dd\") pod \"cert-manager-cainjector-855d9ccff4-59nnv\" (UID: \"16413439-0e97-4c36-996e-5b24f45244c3\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-59nnv" Jan 02 00:18:52 crc kubenswrapper[4766]: I0102 00:18:52.475414 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-59nnv" Jan 02 00:18:53 crc kubenswrapper[4766]: I0102 00:18:53.304388 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-59nnv"] Jan 02 00:18:54 crc kubenswrapper[4766]: I0102 00:18:54.488911 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.222458 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.223678 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.228078 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-sys-config" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.228361 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-global-ca" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.228473 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-ca" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.255194 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.304633 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.304687 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.304739 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.304765 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.304822 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.304870 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbtns\" (UniqueName: \"kubernetes.io/projected/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-kube-api-access-vbtns\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.304890 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.304926 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.304959 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.305104 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.305193 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.305220 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.406554 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbtns\" (UniqueName: 
\"kubernetes.io/projected/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-kube-api-access-vbtns\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.406922 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.406958 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.406978 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.407009 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.407041 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.407057 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.407077 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.407101 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " 
pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.407131 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.407160 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.407192 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.407917 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.408451 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.409758 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.409769 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.410014 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.410064 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: 
\"kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.411292 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.411324 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.411435 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.414363 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.414406 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.432861 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbtns\" (UniqueName: \"kubernetes.io/projected/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-kube-api-access-vbtns\") pod \"service-telemetry-operator-2-build\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.542572 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.908102 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-vcn6h"] Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.912968 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.915410 4766 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-jlbqh" Jan 02 00:18:56 crc kubenswrapper[4766]: I0102 00:18:56.920811 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-vcn6h"] Jan 02 00:18:57 crc kubenswrapper[4766]: I0102 00:18:57.015832 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr2mm\" (UniqueName: \"kubernetes.io/projected/c35a05ea-0e51-4237-a1a2-d3963f7a9512-kube-api-access-pr2mm\") pod \"cert-manager-webhook-f4fb5df64-vcn6h\" (UID: \"c35a05ea-0e51-4237-a1a2-d3963f7a9512\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" Jan 02 00:18:57 crc kubenswrapper[4766]: I0102 00:18:57.015947 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c35a05ea-0e51-4237-a1a2-d3963f7a9512-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-vcn6h\" (UID: \"c35a05ea-0e51-4237-a1a2-d3963f7a9512\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" Jan 02 00:18:57 crc kubenswrapper[4766]: I0102 00:18:57.117330 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr2mm\" (UniqueName: \"kubernetes.io/projected/c35a05ea-0e51-4237-a1a2-d3963f7a9512-kube-api-access-pr2mm\") pod \"cert-manager-webhook-f4fb5df64-vcn6h\" (UID: \"c35a05ea-0e51-4237-a1a2-d3963f7a9512\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" Jan 02 00:18:57 crc kubenswrapper[4766]: I0102 00:18:57.117576 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c35a05ea-0e51-4237-a1a2-d3963f7a9512-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-vcn6h\" (UID: \"c35a05ea-0e51-4237-a1a2-d3963f7a9512\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" Jan 02 00:18:57 crc kubenswrapper[4766]: I0102 00:18:57.132171 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c35a05ea-0e51-4237-a1a2-d3963f7a9512-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-vcn6h\" (UID: \"c35a05ea-0e51-4237-a1a2-d3963f7a9512\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" Jan 02 00:18:57 crc kubenswrapper[4766]: I0102 00:18:57.138484 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr2mm\" (UniqueName: \"kubernetes.io/projected/c35a05ea-0e51-4237-a1a2-d3963f7a9512-kube-api-access-pr2mm\") pod \"cert-manager-webhook-f4fb5df64-vcn6h\" (UID: \"c35a05ea-0e51-4237-a1a2-d3963f7a9512\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" Jan 02 00:18:57 crc kubenswrapper[4766]: I0102 00:18:57.230976 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" Jan 02 00:19:01 crc kubenswrapper[4766]: I0102 00:19:01.718278 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="79a8a59e-c756-4178-af3b-4e9cfb505181" containerName="elasticsearch" probeResult="failure" output=< Jan 02 00:19:01 crc kubenswrapper[4766]: {"timestamp": "2026-01-02T00:19:01+00:00", "message": "readiness probe failed", "curl_rc": "7"} Jan 02 00:19:01 crc kubenswrapper[4766]: > Jan 02 00:19:04 crc kubenswrapper[4766]: W0102 00:19:04.162514 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16413439_0e97_4c36_996e_5b24f45244c3.slice/crio-7a11adbab97743caaa6d0fcff496ca64280d47d655c180333588ec0bdd257eb2 WatchSource:0}: Error finding container 7a11adbab97743caaa6d0fcff496ca64280d47d655c180333588ec0bdd257eb2: Status 404 returned error can't find the container with id 7a11adbab97743caaa6d0fcff496ca64280d47d655c180333588ec0bdd257eb2 Jan 02 00:19:04 crc kubenswrapper[4766]: I0102 00:19:04.240562 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-59nnv" event={"ID":"16413439-0e97-4c36-996e-5b24f45244c3","Type":"ContainerStarted","Data":"7a11adbab97743caaa6d0fcff496ca64280d47d655c180333588ec0bdd257eb2"} Jan 02 00:19:04 crc kubenswrapper[4766]: E0102 00:19:04.576067 4766 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d7a908a23111a624c3fa04dc3105a7a97f48ee60105308bbb6ed42a40d63c2fe" Jan 02 00:19:04 crc kubenswrapper[4766]: E0102 00:19:04.576361 4766 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 02 00:19:04 crc kubenswrapper[4766]: init container &Container{Name:manage-dockerfile,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d7a908a23111a624c3fa04dc3105a7a97f48ee60105308bbb6ed42a40d63c2fe,Command:[],Args:[openshift-manage-dockerfile 
--v=0],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:BUILD,Value:{"kind":"Build","apiVersion":"build.openshift.io/v1","metadata":{"name":"service-telemetry-operator-1","namespace":"service-telemetry","uid":"c52e981e-c460-421c-816b-c2056de27272","resourceVersion":"33917","generation":1,"creationTimestamp":"2026-01-02T00:18:44Z","labels":{"build":"service-telemetry-operator","buildconfig":"service-telemetry-operator","openshift.io/build-config.name":"service-telemetry-operator","openshift.io/build.start-policy":"Serial"},"annotations":{"openshift.io/build-config.name":"service-telemetry-operator","openshift.io/build.number":"1"},"ownerReferences":[{"apiVersion":"build.openshift.io/v1","kind":"BuildConfig","name":"service-telemetry-operator","uid":"caf54d57-d39d-4bd8-8706-7407a635cd8f","controller":true}],"managedFields":[{"manager":"openshift-apiserver","operation":"Update","apiVersion":"build.openshift.io/v1","time":"2026-01-02T00:18:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:openshift.io/build-config.name":{},"f:openshift.io/build.number":{}},"f:labels":{".":{},"f:build":{},"f:buildconfig":{},"f:openshift.io/build-config.name":{},"f:openshift.io/build.start-policy":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"caf54d57-d39d-4bd8-8706-7407a635cd8f\"}":{}}},"f:spec":{"f:output":{"f:to":{}},"f:serviceAccount":{},"f:source":{"f:dockerfile":{},"f:type":{}},"f:strategy":{"f:dockerStrategy":{".":{},"f:from":{}},"f:type":{}},"f:triggeredBy":{}},"f:status":{"f:conditions":{".":{},"k:{\"type\":\"New\"}":{".":{},"f:lastTransitionTime":{},"f:lastUpdateTime":{},"f:status":{},"f:type":{}}},"f:config":{},"f:phase":{}}}}]},"spec":{"serviceAccount":"builder","source":{"type":"Dockerfile","dockerfile":"FROM quay.io/operator-framework/ansible-operator:v1.38.1\n\n# temporarily switch to root user to adjust image layers\nUSER 0\n# Upstream CI builds need the additional EPEL sources for python3-passlib and python3-bcrypt but have no working repos to install epel-release\n# NO_PROXY is undefined in upstream CI builds, but defined (usually blank) during openshift builds (a possibly brittle hack)\nRUN bash -c -- 'if [ \"${NO_PROXY:-__ZZZZZ}\" == \"__ZZZZZ\" ]; then echo \"Applying upstream EPEL hacks\" \u0026\u0026 echo -e \"-----BEGIN PGP PUBLIC KEY 
BLOCK-----\\nmQINBGE3mOsBEACsU+XwJWDJVkItBaugXhXIIkb9oe+7aadELuVo0kBmc3HXt/Yp\\nCJW9hHEiGZ6z2jwgPqyJjZhCvcAWvgzKcvqE+9i0NItV1rzfxrBe2BtUtZmVcuE6\\n2b+SPfxQ2Hr8llaawRjt8BCFX/ZzM4/1Qk+EzlfTcEcpkMf6wdO7kD6ulBk/tbsW\\nDHX2lNcxszTf+XP9HXHWJlA2xBfP+Dk4gl4DnO2Y1xR0OSywE/QtvEbN5cY94ieu\\nn7CBy29AleMhmbnx9pw3NyxcFIAsEZHJoU4ZW9ulAJ/ogttSyAWeacW7eJGW31/Z\\n39cS+I4KXJgeGRI20RmpqfH0tuT+X5Da59YpjYxkbhSK3HYBVnNPhoJFUc2j5iKy\\nXLgkapu1xRnEJhw05kr4LCbud0NTvfecqSqa+59kuVc+zWmfTnGTYc0PXZ6Oa3rK\\n44UOmE6eAT5zd/ToleDO0VesN+EO7CXfRsm7HWGpABF5wNK3vIEF2uRr2VJMvgqS\\n9eNwhJyOzoca4xFSwCkc6dACGGkV+CqhufdFBhmcAsUotSxe3zmrBjqA0B/nxIvH\\nDVgOAMnVCe+Lmv8T0mFgqZSJdIUdKjnOLu/GRFhjDKIak4jeMBMTYpVnU+HhMHLq\\nuDiZkNEvEEGhBQmZuI8J55F/a6UURnxUwT3piyi3Pmr2IFD7ahBxPzOBCQARAQAB\\ntCdGZWRvcmEgKGVwZWw5KSA8ZXBlbEBmZWRvcmFwcm9qZWN0Lm9yZz6JAk4EEwEI\\nADgWIQT/itE0RZcQbs6BO5GKOHK/MihGfAUCYTeY6wIbDwULCQgHAgYVCgkICwIE\\nFgIDAQIeAQIXgAAKCRCKOHK/MihGfFX/EACBPWv20+ttYu1A5WvtHJPzwbj0U4yF\\n3zTQpBglQ2UfkRpYdipTlT3Ih6j5h2VmgRPtINCc/ZE28adrWpBoeFIS2YAKOCLC\\nnZYtHl2nCoLq1U7FSttUGsZ/t8uGCBgnugTfnIYcmlP1jKKA6RJAclK89evDQX5n\\nR9ZD+Cq3CBMlttvSTCht0qQVlwycedH8iWyYgP/mF0W35BIn7NuuZwWhgR00n/VG\\n4nbKPOzTWbsP45awcmivdrS74P6mL84WfkghipdmcoyVb1B8ZP4Y/Ke0RXOnLhNe\\nCfrXXvuW+Pvg2RTfwRDtehGQPAgXbmLmz2ZkV69RGIr54HJv84NDbqZovRTMr7gL\\n9k3ciCzXCiYQgM8yAyGHV0KEhFSQ1HV7gMnt9UmxbxBE2pGU7vu3CwjYga5DpwU7\\nw5wu1TmM5KgZtZvuWOTDnqDLf0cKoIbW8FeeCOn24elcj32bnQDuF9DPey1mqcvT\\n/yEo/Ushyz6CVYxN8DGgcy2M9JOsnmjDx02h6qgWGWDuKgb9jZrvRedpAQCeemEd\\nfhEs6ihqVxRFl16HxC4EVijybhAL76SsM2nbtIqW1apBQJQpXWtQwwdvgTVpdEtE\\nr4ArVJYX5LrswnWEQMOelugUG6S3ZjMfcyOa/O0364iY73vyVgaYK+2XtT2usMux\\nVL469Kj5m13T6w==\\n=Mjs/\\n-----END PGP PUBLIC KEY BLOCK-----\" \u003e /etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-9 \u0026\u0026 echo -e \"[epel]\\nname=Extra Packages for Enterprise Linux 9 - \\$basearch\\nmetalink=https://mirrors.fedoraproject.org/metalink?repo=epel-9\u0026arch=\\$basearch\u0026infra=\\$infra\u0026content=\\$contentdir\\nenabled=1\\ngpgcheck=1\\ngpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-9\" \u003e /etc/yum.repos.d/epel.repo; fi'\n\n# update the base image to allow forward-looking optimistic updates during the testing phase, with the added benefit of helping move closer to passing security scans.\n# -- excludes ansible so it remains at 2.9 tag as shipped with the base image\n# -- installs python3-passlib and python3-bcrypt for oauth-proxy interface\n# -- cleans up the cached data from dnf to keep the image as small as possible\nRUN dnf update -y --exclude=ansible* \u0026\u0026 dnf install -y python3-passlib python3-bcrypt \u0026\u0026 dnf clean all \u0026\u0026 rm -rf /var/cache/dnf\n\nCOPY requirements.yml ${HOME}/requirements.yml\nRUN ansible-galaxy collection install -r ${HOME}/requirements.yml \\\n \u0026\u0026 chmod -R ug+rwx ${HOME}/.ansible\n\n# switch back to user 1001 when running the base image (non-root)\nUSER 1001\n\n# copy in required artifacts for the operator\nCOPY watches.yaml ${HOME}/watches.yaml\nCOPY roles/ ${HOME}/roles/\n"},"strategy":{"type":"Docker","dockerStrategy":{"from":{"kind":"DockerImage","name":"quay.io/operator-framework/ansible-operator@sha256:9895727b7f66bb88fa4c6afdefc7eecf86e6b7c1293920f866a035da9decc58e"},"pullSecret":{"name":"builder-dockercfg-zc2kc"}}},"output":{"to":{"kind":"DockerImage","name":"image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator:latest"},"pushSecret":{"name":"builder-dockercfg-zc2kc"}},"resources":{},"postCommit":{},"nodeSelector":null,"triggeredBy":[{"message":"Image 
change","imageChangeBuild":{"imageID":"quay.io/operator-framework/ansible-operator@sha256:9895727b7f66bb88fa4c6afdefc7eecf86e6b7c1293920f866a035da9decc58e","fromRef":{"kind":"ImageStreamTag","name":"ansible-operator:v1.38.1"}}}]},"status":{"phase":"New","outputDockerImageReference":"image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator:latest","config":{"kind":"BuildConfig","namespace":"service-telemetry","name":"service-telemetry-operator"},"output":{},"conditions":[{"type":"New","status":"True","lastUpdateTime":"2026-01-02T00:18:44Z","lastTransitionTime":"2026-01-02T00:18:44Z"}]}} Jan 02 00:19:04 crc kubenswrapper[4766]: ,ValueFrom:nil,},EnvVar{Name:LANG,Value:C.utf8,ValueFrom:nil,},EnvVar{Name:BUILD_REGISTRIES_CONF_PATH,Value:/var/run/configs/openshift.io/build-system/registries.conf,ValueFrom:nil,},EnvVar{Name:BUILD_REGISTRIES_DIR_PATH,Value:/var/run/configs/openshift.io/build-system/registries.d,ValueFrom:nil,},EnvVar{Name:BUILD_SIGNATURE_POLICY_PATH,Value:/var/run/configs/openshift.io/build-system/policy.json,ValueFrom:nil,},EnvVar{Name:BUILD_STORAGE_CONF_PATH,Value:/var/run/configs/openshift.io/build-system/storage.conf,ValueFrom:nil,},EnvVar{Name:BUILD_BLOBCACHE_DIR,Value:/var/cache/blobs,ValueFrom:nil,},EnvVar{Name:HTTP_PROXY,Value:,ValueFrom:nil,},EnvVar{Name:http_proxy,Value:,ValueFrom:nil,},EnvVar{Name:HTTPS_PROXY,Value:,ValueFrom:nil,},EnvVar{Name:https_proxy,Value:,ValueFrom:nil,},EnvVar{Name:NO_PROXY,Value:,ValueFrom:nil,},EnvVar{Name:no_proxy,Value:,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:buildworkdir,ReadOnly:false,MountPath:/tmp/build,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:build-system-configs,ReadOnly:true,MountPath:/var/run/configs/openshift.io/build-system,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:build-ca-bundles,ReadOnly:false,MountPath:/var/run/configs/openshift.io/certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:build-proxy-ca-bundles,ReadOnly:false,MountPath:/var/run/configs/openshift.io/pki,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:build-blob-cache,ReadOnly:false,MountPath:/var/cache/blobs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-995xg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[CHOWN DAC_OVERRIDE],Drop:[ALL],},Privileged:*false,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:*false,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*0,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod service-telemetry-operator-1-build_service-telemetry(cc801b7d-f015-461e-8a74-47efbf9e4a5e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled Jan 02 00:19:04 crc 
kubenswrapper[4766]: > logger="UnhandledError" Jan 02 00:19:04 crc kubenswrapper[4766]: E0102 00:19:04.578625 4766 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manage-dockerfile\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/service-telemetry-operator-1-build" podUID="cc801b7d-f015-461e-8a74-47efbf9e4a5e" Jan 02 00:19:04 crc kubenswrapper[4766]: I0102 00:19:04.766496 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-vcn6h"] Jan 02 00:19:04 crc kubenswrapper[4766]: W0102 00:19:04.778095 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc35a05ea_0e51_4237_a1a2_d3963f7a9512.slice/crio-612ac5425a0c7f481da4390598399213c22ec8867420b662019a9dff72e1287b WatchSource:0}: Error finding container 612ac5425a0c7f481da4390598399213c22ec8867420b662019a9dff72e1287b: Status 404 returned error can't find the container with id 612ac5425a0c7f481da4390598399213c22ec8867420b662019a9dff72e1287b Jan 02 00:19:05 crc kubenswrapper[4766]: W0102 00:19:05.074410 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c3d16a7_b6e1_4bbc_b8de_004574d80d7e.slice/crio-682605bb7a699f43e537bd913a3a2aa0e9839c3eecb97c2b62531bbadfac9f4b WatchSource:0}: Error finding container 682605bb7a699f43e537bd913a3a2aa0e9839c3eecb97c2b62531bbadfac9f4b: Status 404 returned error can't find the container with id 682605bb7a699f43e537bd913a3a2aa0e9839c3eecb97c2b62531bbadfac9f4b Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.084195 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.252814 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" event={"ID":"c35a05ea-0e51-4237-a1a2-d3963f7a9512","Type":"ContainerStarted","Data":"612ac5425a0c7f481da4390598399213c22ec8867420b662019a9dff72e1287b"} Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.256321 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e","Type":"ContainerStarted","Data":"682605bb7a699f43e537bd913a3a2aa0e9839c3eecb97c2b62531bbadfac9f4b"} Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.613181 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.735970 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-995xg\" (UniqueName: \"kubernetes.io/projected/cc801b7d-f015-461e-8a74-47efbf9e4a5e-kube-api-access-995xg\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.736054 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildworkdir\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.736096 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-root\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.736138 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-push\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.736181 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-run\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.736214 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-node-pullsecrets\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.736258 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-blob-cache\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.736298 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-system-configs\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.736343 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-ca-bundles\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.736377 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: 
\"kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-pull\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.736409 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildcachedir\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.736434 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-proxy-ca-bundles\") pod \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\" (UID: \"cc801b7d-f015-461e-8a74-47efbf9e4a5e\") " Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.737053 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.737207 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.737244 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.737269 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.737453 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.737489 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). 
InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.737866 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.737962 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.738075 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.743593 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-pull" (OuterVolumeSpecName: "builder-dockercfg-zc2kc-pull") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). InnerVolumeSpecName "builder-dockercfg-zc2kc-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.745707 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc801b7d-f015-461e-8a74-47efbf9e4a5e-kube-api-access-995xg" (OuterVolumeSpecName: "kube-api-access-995xg") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). InnerVolumeSpecName "kube-api-access-995xg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.749061 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-push" (OuterVolumeSpecName: "builder-dockercfg-zc2kc-push") pod "cc801b7d-f015-461e-8a74-47efbf9e4a5e" (UID: "cc801b7d-f015-461e-8a74-47efbf9e4a5e"). InnerVolumeSpecName "builder-dockercfg-zc2kc-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.837968 4766 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.838015 4766 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.838036 4766 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.838050 4766 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.838062 4766 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.838073 4766 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-pull\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.838084 4766 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.838094 4766 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc801b7d-f015-461e-8a74-47efbf9e4a5e-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.838104 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-995xg\" (UniqueName: \"kubernetes.io/projected/cc801b7d-f015-461e-8a74-47efbf9e4a5e-kube-api-access-995xg\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.838114 4766 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.838124 4766 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/cc801b7d-f015-461e-8a74-47efbf9e4a5e-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:05 crc kubenswrapper[4766]: I0102 00:19:05.838135 4766 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/cc801b7d-f015-461e-8a74-47efbf9e4a5e-builder-dockercfg-zc2kc-push\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:06 crc kubenswrapper[4766]: I0102 00:19:06.295820 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"cc801b7d-f015-461e-8a74-47efbf9e4a5e","Type":"ContainerDied","Data":"75ee474755c7a75631672b281a4f14306d4b90695154020e5266d253dd49adbd"} Jan 02 00:19:06 crc kubenswrapper[4766]: I0102 00:19:06.296053 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Jan 02 00:19:06 crc kubenswrapper[4766]: I0102 00:19:06.304983 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e","Type":"ContainerStarted","Data":"e99639862fc7bc920244be7b4f81eaf7f6dee5da669052692eee463549347e3e"} Jan 02 00:19:06 crc kubenswrapper[4766]: I0102 00:19:06.390265 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 02 00:19:06 crc kubenswrapper[4766]: E0102 00:19:06.400498 4766 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=6859640842973264640, SKID=, AKID=7F:76:81:70:19:3C:24:D8:32:FA:EF:88:D7:8C:10:8B:ED:75:04:FC failed: x509: certificate signed by unknown authority" Jan 02 00:19:06 crc kubenswrapper[4766]: I0102 00:19:06.408092 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 02 00:19:06 crc kubenswrapper[4766]: I0102 00:19:06.727560 4766 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="79a8a59e-c756-4178-af3b-4e9cfb505181" containerName="elasticsearch" probeResult="failure" output=< Jan 02 00:19:06 crc kubenswrapper[4766]: {"timestamp": "2026-01-02T00:19:06+00:00", "message": "readiness probe failed", "curl_rc": "7"} Jan 02 00:19:06 crc kubenswrapper[4766]: > Jan 02 00:19:07 crc kubenswrapper[4766]: I0102 00:19:07.546455 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc801b7d-f015-461e-8a74-47efbf9e4a5e" path="/var/lib/kubelet/pods/cc801b7d-f015-461e-8a74-47efbf9e4a5e/volumes" Jan 02 00:19:07 crc kubenswrapper[4766]: I0102 00:19:07.566787 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 02 00:19:08 crc kubenswrapper[4766]: I0102 00:19:08.316589 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-operator-2-build" podUID="7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" containerName="git-clone" containerID="cri-o://e99639862fc7bc920244be7b4f81eaf7f6dee5da669052692eee463549347e3e" gracePeriod=30 Jan 02 00:19:09 crc kubenswrapper[4766]: I0102 00:19:09.327015 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-2-build_7c3d16a7-b6e1-4bbc-b8de-004574d80d7e/git-clone/0.log" Jan 02 00:19:09 crc kubenswrapper[4766]: I0102 00:19:09.327298 4766 generic.go:334] "Generic (PLEG): container finished" podID="7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" containerID="e99639862fc7bc920244be7b4f81eaf7f6dee5da669052692eee463549347e3e" exitCode=1 Jan 02 00:19:09 crc kubenswrapper[4766]: I0102 00:19:09.327322 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e","Type":"ContainerDied","Data":"e99639862fc7bc920244be7b4f81eaf7f6dee5da669052692eee463549347e3e"} Jan 02 00:19:10 crc kubenswrapper[4766]: I0102 00:19:10.875027 4766 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-2lsdf"] Jan 02 00:19:10 crc kubenswrapper[4766]: I0102 00:19:10.875827 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-2lsdf" Jan 02 00:19:10 crc kubenswrapper[4766]: I0102 00:19:10.878135 4766 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-kxc6w" Jan 02 00:19:10 crc kubenswrapper[4766]: I0102 00:19:10.883513 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-2lsdf"] Jan 02 00:19:10 crc kubenswrapper[4766]: I0102 00:19:10.921152 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpcl4\" (UniqueName: \"kubernetes.io/projected/8384b8ff-68e8-469b-8090-f5b763331396-kube-api-access-zpcl4\") pod \"cert-manager-86cb77c54b-2lsdf\" (UID: \"8384b8ff-68e8-469b-8090-f5b763331396\") " pod="cert-manager/cert-manager-86cb77c54b-2lsdf" Jan 02 00:19:10 crc kubenswrapper[4766]: I0102 00:19:10.921235 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8384b8ff-68e8-469b-8090-f5b763331396-bound-sa-token\") pod \"cert-manager-86cb77c54b-2lsdf\" (UID: \"8384b8ff-68e8-469b-8090-f5b763331396\") " pod="cert-manager/cert-manager-86cb77c54b-2lsdf" Jan 02 00:19:11 crc kubenswrapper[4766]: I0102 00:19:11.021837 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpcl4\" (UniqueName: \"kubernetes.io/projected/8384b8ff-68e8-469b-8090-f5b763331396-kube-api-access-zpcl4\") pod \"cert-manager-86cb77c54b-2lsdf\" (UID: \"8384b8ff-68e8-469b-8090-f5b763331396\") " pod="cert-manager/cert-manager-86cb77c54b-2lsdf" Jan 02 00:19:11 crc kubenswrapper[4766]: I0102 00:19:11.021916 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8384b8ff-68e8-469b-8090-f5b763331396-bound-sa-token\") pod \"cert-manager-86cb77c54b-2lsdf\" (UID: \"8384b8ff-68e8-469b-8090-f5b763331396\") " pod="cert-manager/cert-manager-86cb77c54b-2lsdf" Jan 02 00:19:11 crc kubenswrapper[4766]: I0102 00:19:11.047075 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpcl4\" (UniqueName: \"kubernetes.io/projected/8384b8ff-68e8-469b-8090-f5b763331396-kube-api-access-zpcl4\") pod \"cert-manager-86cb77c54b-2lsdf\" (UID: \"8384b8ff-68e8-469b-8090-f5b763331396\") " pod="cert-manager/cert-manager-86cb77c54b-2lsdf" Jan 02 00:19:11 crc kubenswrapper[4766]: I0102 00:19:11.049948 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8384b8ff-68e8-469b-8090-f5b763331396-bound-sa-token\") pod \"cert-manager-86cb77c54b-2lsdf\" (UID: \"8384b8ff-68e8-469b-8090-f5b763331396\") " pod="cert-manager/cert-manager-86cb77c54b-2lsdf" Jan 02 00:19:11 crc kubenswrapper[4766]: I0102 00:19:11.225574 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-2lsdf" Jan 02 00:19:12 crc kubenswrapper[4766]: I0102 00:19:12.902366 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/elasticsearch-es-default-0" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.035927 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-2-build_7c3d16a7-b6e1-4bbc-b8de-004574d80d7e/git-clone/0.log" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.036271 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.202421 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-pull\") pod \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.202880 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildworkdir\") pod \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.202953 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-root\") pod \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.202993 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-proxy-ca-bundles\") pod \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.203020 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-push\") pod \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.203115 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildcachedir\") pod \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.203198 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-system-configs\") pod \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.203225 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-run\") pod 
\"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.203252 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbtns\" (UniqueName: \"kubernetes.io/projected/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-kube-api-access-vbtns\") pod \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.203282 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-blob-cache\") pod \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.203311 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-ca-bundles\") pod \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.203346 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-node-pullsecrets\") pod \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\" (UID: \"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e\") " Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.203673 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.203963 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.204397 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.204586 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.204684 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.204721 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.205030 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.205169 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.206532 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.208166 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-pull" (OuterVolumeSpecName: "builder-dockercfg-zc2kc-pull") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "builder-dockercfg-zc2kc-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.209102 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-push" (OuterVolumeSpecName: "builder-dockercfg-zc2kc-push") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "builder-dockercfg-zc2kc-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.224625 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-kube-api-access-vbtns" (OuterVolumeSpecName: "kube-api-access-vbtns") pod "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" (UID: "7c3d16a7-b6e1-4bbc-b8de-004574d80d7e"). InnerVolumeSpecName "kube-api-access-vbtns". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.267791 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-2lsdf"] Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305267 4766 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305300 4766 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305328 4766 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305336 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbtns\" (UniqueName: \"kubernetes.io/projected/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-kube-api-access-vbtns\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305345 4766 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305353 4766 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305361 4766 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305371 4766 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-pull\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305380 4766 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305387 4766 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305411 4766 reconciler_common.go:293] "Volume 
detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.305419 4766 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e-builder-dockercfg-zc2kc-push\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.368804 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-59nnv" event={"ID":"16413439-0e97-4c36-996e-5b24f45244c3","Type":"ContainerStarted","Data":"d8d28db359bc67c73ce2845369c048d323abde78750eb508e32647197ddf589c"} Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.370650 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-2lsdf" event={"ID":"8384b8ff-68e8-469b-8090-f5b763331396","Type":"ContainerStarted","Data":"354f734ec396617649aae2db8dc141bd2d5f039e9b4c669141928233d08b3b91"} Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.372168 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" event={"ID":"c35a05ea-0e51-4237-a1a2-d3963f7a9512","Type":"ContainerStarted","Data":"f712aff7d1656a52c04076c4b3a20455ec76e59eb7027a3f54cc63e207a0b616"} Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.372301 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.373419 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-2-build_7c3d16a7-b6e1-4bbc-b8de-004574d80d7e/git-clone/0.log" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.373459 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"7c3d16a7-b6e1-4bbc-b8de-004574d80d7e","Type":"ContainerDied","Data":"682605bb7a699f43e537bd913a3a2aa0e9839c3eecb97c2b62531bbadfac9f4b"} Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.373488 4766 scope.go:117] "RemoveContainer" containerID="e99639862fc7bc920244be7b4f81eaf7f6dee5da669052692eee463549347e3e" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.373614 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.399514 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-59nnv" podStartSLOduration=12.491820526 podStartE2EDuration="23.399489013s" podCreationTimestamp="2026-01-02 00:18:52 +0000 UTC" firstStartedPulling="2026-01-02 00:19:04.165326983 +0000 UTC m=+718.963814137" lastFinishedPulling="2026-01-02 00:19:15.07299547 +0000 UTC m=+729.871482624" observedRunningTime="2026-01-02 00:19:15.390613251 +0000 UTC m=+730.189100435" watchObservedRunningTime="2026-01-02 00:19:15.399489013 +0000 UTC m=+730.197976157" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.420501 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.426591 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.449830 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" podStartSLOduration=9.098904098 podStartE2EDuration="19.449812667s" podCreationTimestamp="2026-01-02 00:18:56 +0000 UTC" firstStartedPulling="2026-01-02 00:19:04.779505709 +0000 UTC m=+719.577992863" lastFinishedPulling="2026-01-02 00:19:15.130414278 +0000 UTC m=+729.928901432" observedRunningTime="2026-01-02 00:19:15.441109159 +0000 UTC m=+730.239596313" watchObservedRunningTime="2026-01-02 00:19:15.449812667 +0000 UTC m=+730.248299821" Jan 02 00:19:15 crc kubenswrapper[4766]: I0102 00:19:15.542195 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" path="/var/lib/kubelet/pods/7c3d16a7-b6e1-4bbc-b8de-004574d80d7e/volumes" Jan 02 00:19:16 crc kubenswrapper[4766]: I0102 00:19:16.380566 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-2lsdf" event={"ID":"8384b8ff-68e8-469b-8090-f5b763331396","Type":"ContainerStarted","Data":"99b88f15721977b5c6569fec525cb99d0770f265298391de77ae2c4ac718b40d"} Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.191450 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-2lsdf" podStartSLOduration=9.191424959 podStartE2EDuration="9.191424959s" podCreationTimestamp="2026-01-02 00:19:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-02 00:19:16.402951406 +0000 UTC m=+731.201438570" watchObservedRunningTime="2026-01-02 00:19:19.191424959 +0000 UTC m=+733.989912153" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.195082 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 02 00:19:19 crc kubenswrapper[4766]: E0102 00:19:19.195604 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" containerName="git-clone" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.195627 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" containerName="git-clone" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.195826 4766 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="7c3d16a7-b6e1-4bbc-b8de-004574d80d7e" containerName="git-clone" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.197344 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.199017 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-3-sys-config" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.200070 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-zc2kc" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.200292 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-3-ca" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.200458 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-3-global-ca" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.243176 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.361649 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-system-configs\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.361698 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-run\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.361783 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.361859 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-root\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.361885 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-buildworkdir\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.361901 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-buildcachedir\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.361918 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.361971 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-node-pullsecrets\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.361988 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.362019 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-proxy-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.362077 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmbvb\" (UniqueName: \"kubernetes.io/projected/4fbd3dec-8864-42e8-9978-06d06863d151-kube-api-access-nmbvb\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.362098 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-build-blob-cache\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.463242 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.463614 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-root\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.463768 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-buildworkdir\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.463945 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-buildcachedir\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.464037 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-buildcachedir\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.464106 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-root\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.464271 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.464437 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-node-pullsecrets\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.464558 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-node-pullsecrets\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.464641 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-buildworkdir\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc 
kubenswrapper[4766]: I0102 00:19:19.464670 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.464917 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-proxy-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.465166 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmbvb\" (UniqueName: \"kubernetes.io/projected/4fbd3dec-8864-42e8-9978-06d06863d151-kube-api-access-nmbvb\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.465279 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-build-blob-cache\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.465400 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-system-configs\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.465504 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-run\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.465668 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.465901 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-build-blob-cache\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.466457 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-proxy-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.468314 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-system-configs\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.468570 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-run\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.468627 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.468801 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.483447 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmbvb\" (UniqueName: \"kubernetes.io/projected/4fbd3dec-8864-42e8-9978-06d06863d151-kube-api-access-nmbvb\") pod \"service-telemetry-operator-3-build\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:19 crc kubenswrapper[4766]: I0102 00:19:19.517964 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:20 crc kubenswrapper[4766]: I0102 00:19:20.146331 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 02 00:19:20 crc kubenswrapper[4766]: I0102 00:19:20.407004 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-3-build" event={"ID":"4fbd3dec-8864-42e8-9978-06d06863d151","Type":"ContainerStarted","Data":"cc82bc52a32fb1bb050032819ea8e08a1b74b2c83cd40d07e75c193fb102d8c1"} Jan 02 00:19:21 crc kubenswrapper[4766]: I0102 00:19:21.414974 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-3-build" event={"ID":"4fbd3dec-8864-42e8-9978-06d06863d151","Type":"ContainerStarted","Data":"451f7325580b55e6f3c91c6a533b4e26db2c5663530607081dfc1410fb68862e"} Jan 02 00:19:21 crc kubenswrapper[4766]: E0102 00:19:21.468969 4766 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=6859640842973264640, SKID=, AKID=7F:76:81:70:19:3C:24:D8:32:FA:EF:88:D7:8C:10:8B:ED:75:04:FC failed: x509: certificate signed by unknown authority" Jan 02 00:19:22 crc kubenswrapper[4766]: I0102 00:19:22.234313 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-vcn6h" Jan 02 00:19:22 crc kubenswrapper[4766]: I0102 00:19:22.505865 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 02 00:19:23 crc kubenswrapper[4766]: I0102 00:19:23.428029 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-operator-3-build" podUID="4fbd3dec-8864-42e8-9978-06d06863d151" containerName="git-clone" containerID="cri-o://451f7325580b55e6f3c91c6a533b4e26db2c5663530607081dfc1410fb68862e" gracePeriod=30 Jan 02 00:19:24 crc kubenswrapper[4766]: I0102 00:19:24.180540 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:19:24 crc kubenswrapper[4766]: I0102 00:19:24.180599 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:19:24 crc kubenswrapper[4766]: I0102 00:19:24.436508 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-3-build_4fbd3dec-8864-42e8-9978-06d06863d151/git-clone/0.log" Jan 02 00:19:24 crc kubenswrapper[4766]: I0102 00:19:24.436563 4766 generic.go:334] "Generic (PLEG): container finished" podID="4fbd3dec-8864-42e8-9978-06d06863d151" containerID="451f7325580b55e6f3c91c6a533b4e26db2c5663530607081dfc1410fb68862e" exitCode=1 Jan 02 00:19:24 crc kubenswrapper[4766]: I0102 00:19:24.436596 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-3-build" event={"ID":"4fbd3dec-8864-42e8-9978-06d06863d151","Type":"ContainerDied","Data":"451f7325580b55e6f3c91c6a533b4e26db2c5663530607081dfc1410fb68862e"} Jan 02 00:19:26 
crc kubenswrapper[4766]: I0102 00:19:26.890651 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-3-build_4fbd3dec-8864-42e8-9978-06d06863d151/git-clone/0.log" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.891265 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.968981 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-ca-bundles\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969037 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-node-pullsecrets\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969058 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-system-configs\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969092 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-push\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969110 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-build-blob-cache\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969135 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-proxy-ca-bundles\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969168 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-buildworkdir\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969189 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-pull\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969205 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: 
\"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-run\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969215 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969235 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-root\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969265 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-buildcachedir\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969300 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmbvb\" (UniqueName: \"kubernetes.io/projected/4fbd3dec-8864-42e8-9978-06d06863d151-kube-api-access-nmbvb\") pod \"4fbd3dec-8864-42e8-9978-06d06863d151\" (UID: \"4fbd3dec-8864-42e8-9978-06d06863d151\") " Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969488 4766 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969587 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969869 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969894 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "build-system-configs". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969902 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.969943 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.970056 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.970108 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.970365 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.974257 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-pull" (OuterVolumeSpecName: "builder-dockercfg-zc2kc-pull") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "builder-dockercfg-zc2kc-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.974494 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-push" (OuterVolumeSpecName: "builder-dockercfg-zc2kc-push") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "builder-dockercfg-zc2kc-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:19:26 crc kubenswrapper[4766]: I0102 00:19:26.974521 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fbd3dec-8864-42e8-9978-06d06863d151-kube-api-access-nmbvb" (OuterVolumeSpecName: "kube-api-access-nmbvb") pod "4fbd3dec-8864-42e8-9978-06d06863d151" (UID: "4fbd3dec-8864-42e8-9978-06d06863d151"). InnerVolumeSpecName "kube-api-access-nmbvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.070776 4766 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.070813 4766 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-pull\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.070824 4766 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.070833 4766 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.070859 4766 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4fbd3dec-8864-42e8-9978-06d06863d151-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.070868 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmbvb\" (UniqueName: \"kubernetes.io/projected/4fbd3dec-8864-42e8-9978-06d06863d151-kube-api-access-nmbvb\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.070876 4766 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.070884 4766 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.070893 4766 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/4fbd3dec-8864-42e8-9978-06d06863d151-builder-dockercfg-zc2kc-push\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.070902 4766 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4fbd3dec-8864-42e8-9978-06d06863d151-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.070912 4766 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4fbd3dec-8864-42e8-9978-06d06863d151-build-proxy-ca-bundles\") 
on node \"crc\" DevicePath \"\"" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.457029 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-3-build_4fbd3dec-8864-42e8-9978-06d06863d151/git-clone/0.log" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.457141 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-3-build" event={"ID":"4fbd3dec-8864-42e8-9978-06d06863d151","Type":"ContainerDied","Data":"cc82bc52a32fb1bb050032819ea8e08a1b74b2c83cd40d07e75c193fb102d8c1"} Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.457193 4766 scope.go:117] "RemoveContainer" containerID="451f7325580b55e6f3c91c6a533b4e26db2c5663530607081dfc1410fb68862e" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.457242 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-3-build" Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.502781 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.507269 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 02 00:19:27 crc kubenswrapper[4766]: I0102 00:19:27.541927 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fbd3dec-8864-42e8-9978-06d06863d151" path="/var/lib/kubelet/pods/4fbd3dec-8864-42e8-9978-06d06863d151/volumes" Jan 02 00:19:33 crc kubenswrapper[4766]: I0102 00:19:33.970774 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-4-build"] Jan 02 00:19:33 crc kubenswrapper[4766]: E0102 00:19:33.971619 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fbd3dec-8864-42e8-9978-06d06863d151" containerName="git-clone" Jan 02 00:19:33 crc kubenswrapper[4766]: I0102 00:19:33.971634 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fbd3dec-8864-42e8-9978-06d06863d151" containerName="git-clone" Jan 02 00:19:33 crc kubenswrapper[4766]: I0102 00:19:33.971766 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fbd3dec-8864-42e8-9978-06d06863d151" containerName="git-clone" Jan 02 00:19:33 crc kubenswrapper[4766]: I0102 00:19:33.972770 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:33 crc kubenswrapper[4766]: I0102 00:19:33.974669 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-4-ca" Jan 02 00:19:33 crc kubenswrapper[4766]: I0102 00:19:33.974944 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-zc2kc" Jan 02 00:19:33 crc kubenswrapper[4766]: I0102 00:19:33.975111 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-4-global-ca" Jan 02 00:19:33 crc kubenswrapper[4766]: I0102 00:19:33.975080 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-4-sys-config" Jan 02 00:19:33 crc kubenswrapper[4766]: I0102 00:19:33.993345 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-4-build"] Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.079888 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swnrl\" (UniqueName: \"kubernetes.io/projected/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-kube-api-access-swnrl\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.080024 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.080074 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-run\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.080102 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-system-configs\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.080142 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.080169 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-node-pullsecrets\") pod \"service-telemetry-operator-4-build\" (UID: 
\"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.080259 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-blob-cache\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.080309 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.080351 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-root\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.080430 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.080466 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildcachedir\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.080510 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildworkdir\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.181448 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-run\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.181524 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-system-configs\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " 
pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.181579 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.181618 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-node-pullsecrets\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.181658 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-blob-cache\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.181698 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.181734 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-root\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.181785 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.181825 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildcachedir\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.181916 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildworkdir\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.181994 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-swnrl\" (UniqueName: \"kubernetes.io/projected/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-kube-api-access-swnrl\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.182084 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.182267 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-run\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.182450 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-root\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.182168 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildcachedir\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.182554 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-node-pullsecrets\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.182666 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildworkdir\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.182732 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-system-configs\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.182980 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " 
pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.183147 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-blob-cache\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.183183 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.187563 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.188648 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.202076 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swnrl\" (UniqueName: \"kubernetes.io/projected/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-kube-api-access-swnrl\") pod \"service-telemetry-operator-4-build\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.285672 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:34 crc kubenswrapper[4766]: I0102 00:19:34.507155 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-4-build"] Jan 02 00:19:34 crc kubenswrapper[4766]: W0102 00:19:34.513071 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod733cfa1a_d99b_468c_9e7c_6a2e856ceb6c.slice/crio-5595f2a1f29e71d3f0094c0521699cfd69fb29e10cff21ced186c5b1b10372a2 WatchSource:0}: Error finding container 5595f2a1f29e71d3f0094c0521699cfd69fb29e10cff21ced186c5b1b10372a2: Status 404 returned error can't find the container with id 5595f2a1f29e71d3f0094c0521699cfd69fb29e10cff21ced186c5b1b10372a2 Jan 02 00:19:35 crc kubenswrapper[4766]: I0102 00:19:35.519422 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-4-build" event={"ID":"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c","Type":"ContainerStarted","Data":"b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e"} Jan 02 00:19:35 crc kubenswrapper[4766]: I0102 00:19:35.519925 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-4-build" event={"ID":"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c","Type":"ContainerStarted","Data":"5595f2a1f29e71d3f0094c0521699cfd69fb29e10cff21ced186c5b1b10372a2"} Jan 02 00:19:35 crc kubenswrapper[4766]: E0102 00:19:35.589471 4766 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=6859640842973264640, SKID=, AKID=7F:76:81:70:19:3C:24:D8:32:FA:EF:88:D7:8C:10:8B:ED:75:04:FC failed: x509: certificate signed by unknown authority" Jan 02 00:19:36 crc kubenswrapper[4766]: I0102 00:19:36.615495 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-4-build"] Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.533590 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-operator-4-build" podUID="733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" containerName="git-clone" containerID="cri-o://b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e" gracePeriod=30 Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.888042 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-4-build_733cfa1a-d99b-468c-9e7c-6a2e856ceb6c/git-clone/0.log" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.888117 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.947463 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swnrl\" (UniqueName: \"kubernetes.io/projected/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-kube-api-access-swnrl\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.947517 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-pull\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.947545 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-blob-cache\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.947569 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildworkdir\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.947589 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-run\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.947912 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.948013 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.948659 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "container-storage-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.948706 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-node-pullsecrets\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.948764 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-push\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.948795 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-ca-bundles\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.948822 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-system-configs\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.948872 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildcachedir\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.948895 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-root\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.948936 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-proxy-ca-bundles\") pod \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\" (UID: \"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c\") " Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.949554 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.949619 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.949737 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.949872 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.950136 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.950267 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.953810 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-kube-api-access-swnrl" (OuterVolumeSpecName: "kube-api-access-swnrl") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "kube-api-access-swnrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.953993 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-push" (OuterVolumeSpecName: "builder-dockercfg-zc2kc-push") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "builder-dockercfg-zc2kc-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:19:37 crc kubenswrapper[4766]: I0102 00:19:37.954100 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-pull" (OuterVolumeSpecName: "builder-dockercfg-zc2kc-pull") pod "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" (UID: "733cfa1a-d99b-468c-9e7c-6a2e856ceb6c"). InnerVolumeSpecName "builder-dockercfg-zc2kc-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057376 4766 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057701 4766 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-push\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057714 4766 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057722 4766 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057730 4766 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057746 4766 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057754 4766 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057762 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swnrl\" (UniqueName: \"kubernetes.io/projected/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-kube-api-access-swnrl\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057771 4766 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-builder-dockercfg-zc2kc-pull\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057785 4766 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057795 4766 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.057803 4766 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.544237 4766 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_service-telemetry-operator-4-build_733cfa1a-d99b-468c-9e7c-6a2e856ceb6c/git-clone/0.log" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.544304 4766 generic.go:334] "Generic (PLEG): container finished" podID="733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" containerID="b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e" exitCode=1 Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.544346 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-4-build" event={"ID":"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c","Type":"ContainerDied","Data":"b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e"} Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.544389 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-4-build" event={"ID":"733cfa1a-d99b-468c-9e7c-6a2e856ceb6c","Type":"ContainerDied","Data":"5595f2a1f29e71d3f0094c0521699cfd69fb29e10cff21ced186c5b1b10372a2"} Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.544416 4766 scope.go:117] "RemoveContainer" containerID="b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.544422 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-4-build" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.577155 4766 scope.go:117] "RemoveContainer" containerID="b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e" Jan 02 00:19:38 crc kubenswrapper[4766]: E0102 00:19:38.577687 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e\": container with ID starting with b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e not found: ID does not exist" containerID="b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.577722 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e"} err="failed to get container status \"b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e\": rpc error: code = NotFound desc = could not find container \"b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e\": container with ID starting with b2fe5866d0cf3e6ec5ab74b92699694c1e904a080ff5499eb633a4c920317d2e not found: ID does not exist" Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.595361 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-4-build"] Jan 02 00:19:38 crc kubenswrapper[4766]: I0102 00:19:38.605572 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-4-build"] Jan 02 00:19:39 crc kubenswrapper[4766]: I0102 00:19:39.547759 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" path="/var/lib/kubelet/pods/733cfa1a-d99b-468c-9e7c-6a2e856ceb6c/volumes" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.145601 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-5-build"] Jan 02 00:19:48 crc kubenswrapper[4766]: E0102 00:19:48.147339 4766 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" containerName="git-clone" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.147374 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" containerName="git-clone" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.147515 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="733cfa1a-d99b-468c-9e7c-6a2e856ceb6c" containerName="git-clone" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.148709 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.151673 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-5-sys-config" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.152230 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-5-ca" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.152382 4766 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-zc2kc" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.152492 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-5-global-ca" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.177215 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-5-build"] Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.206754 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rx4t\" (UniqueName: \"kubernetes.io/projected/9b513017-a533-4f18-990b-fcd29f516c88-kube-api-access-4rx4t\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.207115 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-root\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.207242 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.207352 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-ca-bundles\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.207500 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" 
(UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-build-blob-cache\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.207603 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-node-pullsecrets\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.207704 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-buildcachedir\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.207818 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-proxy-ca-bundles\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.207966 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-buildworkdir\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.208160 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.208298 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-system-configs\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.208452 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-run\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.310481 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: 
\"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-build-blob-cache\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.310538 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-node-pullsecrets\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.310576 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-buildcachedir\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.310654 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-proxy-ca-bundles\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.310720 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-node-pullsecrets\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.310743 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-buildcachedir\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.310796 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-buildworkdir\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.311035 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-build-blob-cache\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.311203 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-buildworkdir\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.311271 
4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-system-configs\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.311304 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.311330 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-run\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.311367 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rx4t\" (UniqueName: \"kubernetes.io/projected/9b513017-a533-4f18-990b-fcd29f516c88-kube-api-access-4rx4t\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.311401 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-root\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.311428 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.311456 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-ca-bundles\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.311827 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-proxy-ca-bundles\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.312305 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: 
\"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-run\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.312376 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-ca-bundles\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.312953 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-system-configs\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.313003 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-root\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.318768 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-push\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.319053 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-pull\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.337707 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rx4t\" (UniqueName: \"kubernetes.io/projected/9b513017-a533-4f18-990b-fcd29f516c88-kube-api-access-4rx4t\") pod \"service-telemetry-operator-5-build\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:48 crc kubenswrapper[4766]: I0102 00:19:48.483425 4766 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:49 crc kubenswrapper[4766]: I0102 00:19:49.026498 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-5-build"] Jan 02 00:19:49 crc kubenswrapper[4766]: W0102 00:19:49.043295 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b513017_a533_4f18_990b_fcd29f516c88.slice/crio-26e4578b14d0a58c84d9875374838a6c855315dc91f60b91b1a73c317976d716 WatchSource:0}: Error finding container 26e4578b14d0a58c84d9875374838a6c855315dc91f60b91b1a73c317976d716: Status 404 returned error can't find the container with id 26e4578b14d0a58c84d9875374838a6c855315dc91f60b91b1a73c317976d716 Jan 02 00:19:49 crc kubenswrapper[4766]: I0102 00:19:49.649644 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-5-build" event={"ID":"9b513017-a533-4f18-990b-fcd29f516c88","Type":"ContainerStarted","Data":"66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6"} Jan 02 00:19:49 crc kubenswrapper[4766]: I0102 00:19:49.650146 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-5-build" event={"ID":"9b513017-a533-4f18-990b-fcd29f516c88","Type":"ContainerStarted","Data":"26e4578b14d0a58c84d9875374838a6c855315dc91f60b91b1a73c317976d716"} Jan 02 00:19:49 crc kubenswrapper[4766]: E0102 00:19:49.715729 4766 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=6859640842973264640, SKID=, AKID=7F:76:81:70:19:3C:24:D8:32:FA:EF:88:D7:8C:10:8B:ED:75:04:FC failed: x509: certificate signed by unknown authority" Jan 02 00:19:50 crc kubenswrapper[4766]: I0102 00:19:50.049116 4766 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 02 00:19:50 crc kubenswrapper[4766]: I0102 00:19:50.751442 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-5-build"] Jan 02 00:19:51 crc kubenswrapper[4766]: I0102 00:19:51.665280 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-operator-5-build" podUID="9b513017-a533-4f18-990b-fcd29f516c88" containerName="git-clone" containerID="cri-o://66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6" gracePeriod=30 Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.466940 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-5-build_9b513017-a533-4f18-990b-fcd29f516c88/git-clone/0.log" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.467359 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573001 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-buildcachedir\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573079 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-buildworkdir\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573112 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-ca-bundles\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573135 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573145 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-run\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573168 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-proxy-ca-bundles\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573186 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-push\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573205 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-node-pullsecrets\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573250 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rx4t\" (UniqueName: \"kubernetes.io/projected/9b513017-a533-4f18-990b-fcd29f516c88-kube-api-access-4rx4t\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573267 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-build-blob-cache\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573287 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-pull\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573312 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-system-configs\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573336 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-root\") pod \"9b513017-a533-4f18-990b-fcd29f516c88\" (UID: \"9b513017-a533-4f18-990b-fcd29f516c88\") " Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573476 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573499 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573788 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573920 4766 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573937 4766 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573949 4766 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573959 4766 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/9b513017-a533-4f18-990b-fcd29f516c88-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573924 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.573949 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.574166 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.574217 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.574259 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.577992 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-pull" (OuterVolumeSpecName: "builder-dockercfg-zc2kc-pull") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "builder-dockercfg-zc2kc-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.578633 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-push" (OuterVolumeSpecName: "builder-dockercfg-zc2kc-push") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "builder-dockercfg-zc2kc-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.582806 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b513017-a533-4f18-990b-fcd29f516c88-kube-api-access-4rx4t" (OuterVolumeSpecName: "kube-api-access-4rx4t") pod "9b513017-a533-4f18-990b-fcd29f516c88" (UID: "9b513017-a533-4f18-990b-fcd29f516c88"). InnerVolumeSpecName "kube-api-access-4rx4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.675089 4766 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-zc2kc-push\" (UniqueName: \"kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-push\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.675126 4766 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.675145 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rx4t\" (UniqueName: \"kubernetes.io/projected/9b513017-a533-4f18-990b-fcd29f516c88-kube-api-access-4rx4t\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.675174 4766 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.675211 4766 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-zc2kc-pull\" (UniqueName: \"kubernetes.io/secret/9b513017-a533-4f18-990b-fcd29f516c88-builder-dockercfg-zc2kc-pull\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.675236 4766 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.675260 4766 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/9b513017-a533-4f18-990b-fcd29f516c88-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.675285 4766 
reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b513017-a533-4f18-990b-fcd29f516c88-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.680066 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-5-build_9b513017-a533-4f18-990b-fcd29f516c88/git-clone/0.log" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.680119 4766 generic.go:334] "Generic (PLEG): container finished" podID="9b513017-a533-4f18-990b-fcd29f516c88" containerID="66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6" exitCode=1 Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.680151 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-5-build" event={"ID":"9b513017-a533-4f18-990b-fcd29f516c88","Type":"ContainerDied","Data":"66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6"} Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.680182 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-5-build" event={"ID":"9b513017-a533-4f18-990b-fcd29f516c88","Type":"ContainerDied","Data":"26e4578b14d0a58c84d9875374838a6c855315dc91f60b91b1a73c317976d716"} Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.680200 4766 scope.go:117] "RemoveContainer" containerID="66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.680278 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-5-build" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.714984 4766 scope.go:117] "RemoveContainer" containerID="66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6" Jan 02 00:19:52 crc kubenswrapper[4766]: E0102 00:19:52.716142 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6\": container with ID starting with 66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6 not found: ID does not exist" containerID="66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.716212 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6"} err="failed to get container status \"66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6\": rpc error: code = NotFound desc = could not find container \"66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6\": container with ID starting with 66886eb275026523b5bb9eb8f35e6fb7dc613bc4c25301d1981e7d16856019e6 not found: ID does not exist" Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.723400 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-5-build"] Jan 02 00:19:52 crc kubenswrapper[4766]: I0102 00:19:52.728707 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-5-build"] Jan 02 00:19:53 crc kubenswrapper[4766]: I0102 00:19:53.540739 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b513017-a533-4f18-990b-fcd29f516c88" 
path="/var/lib/kubelet/pods/9b513017-a533-4f18-990b-fcd29f516c88/volumes" Jan 02 00:19:54 crc kubenswrapper[4766]: I0102 00:19:54.181190 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:19:54 crc kubenswrapper[4766]: I0102 00:19:54.181711 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:20:24 crc kubenswrapper[4766]: I0102 00:20:24.180519 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:20:24 crc kubenswrapper[4766]: I0102 00:20:24.181042 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:20:24 crc kubenswrapper[4766]: I0102 00:20:24.181086 4766 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:20:24 crc kubenswrapper[4766]: I0102 00:20:24.181703 4766 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"690529f4c1451fbb7a4daf40afad9fe675d9ba0699ee41538a56e7145e9d3df2"} pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 02 00:20:24 crc kubenswrapper[4766]: I0102 00:20:24.181780 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" containerID="cri-o://690529f4c1451fbb7a4daf40afad9fe675d9ba0699ee41538a56e7145e9d3df2" gracePeriod=600 Jan 02 00:20:25 crc kubenswrapper[4766]: I0102 00:20:25.031668 4766 generic.go:334] "Generic (PLEG): container finished" podID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerID="690529f4c1451fbb7a4daf40afad9fe675d9ba0699ee41538a56e7145e9d3df2" exitCode=0 Jan 02 00:20:25 crc kubenswrapper[4766]: I0102 00:20:25.031741 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerDied","Data":"690529f4c1451fbb7a4daf40afad9fe675d9ba0699ee41538a56e7145e9d3df2"} Jan 02 00:20:25 crc kubenswrapper[4766]: I0102 00:20:25.032090 4766 scope.go:117] "RemoveContainer" containerID="a3a7bcfe3f9b4401d1f157274622d2a7ef5d28b924c1e0978ebb8ff12e7ce91d" Jan 02 00:20:26 crc kubenswrapper[4766]: I0102 00:20:26.039915 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerStarted","Data":"f3099eceeb9c369e0ae2f8da79be91ae54a2ea0c86d32b84fd1ed6572982cd8c"} Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.379638 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-bsr5x/must-gather-nclqp"] Jan 02 00:20:30 crc kubenswrapper[4766]: E0102 00:20:30.380631 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b513017-a533-4f18-990b-fcd29f516c88" containerName="git-clone" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.380652 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b513017-a533-4f18-990b-fcd29f516c88" containerName="git-clone" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.380913 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b513017-a533-4f18-990b-fcd29f516c88" containerName="git-clone" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.381978 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bsr5x/must-gather-nclqp" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.384313 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-bsr5x"/"kube-root-ca.crt" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.385452 4766 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-bsr5x"/"openshift-service-ca.crt" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.415734 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-bsr5x/must-gather-nclqp"] Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.470506 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f15bc032-b4e0-46f9-bbac-c36f6776d77b-must-gather-output\") pod \"must-gather-nclqp\" (UID: \"f15bc032-b4e0-46f9-bbac-c36f6776d77b\") " pod="openshift-must-gather-bsr5x/must-gather-nclqp" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.470573 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwprk\" (UniqueName: \"kubernetes.io/projected/f15bc032-b4e0-46f9-bbac-c36f6776d77b-kube-api-access-mwprk\") pod \"must-gather-nclqp\" (UID: \"f15bc032-b4e0-46f9-bbac-c36f6776d77b\") " pod="openshift-must-gather-bsr5x/must-gather-nclqp" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.572091 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f15bc032-b4e0-46f9-bbac-c36f6776d77b-must-gather-output\") pod \"must-gather-nclqp\" (UID: \"f15bc032-b4e0-46f9-bbac-c36f6776d77b\") " pod="openshift-must-gather-bsr5x/must-gather-nclqp" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.572137 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwprk\" (UniqueName: \"kubernetes.io/projected/f15bc032-b4e0-46f9-bbac-c36f6776d77b-kube-api-access-mwprk\") pod \"must-gather-nclqp\" (UID: \"f15bc032-b4e0-46f9-bbac-c36f6776d77b\") " pod="openshift-must-gather-bsr5x/must-gather-nclqp" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.572602 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: 
\"kubernetes.io/empty-dir/f15bc032-b4e0-46f9-bbac-c36f6776d77b-must-gather-output\") pod \"must-gather-nclqp\" (UID: \"f15bc032-b4e0-46f9-bbac-c36f6776d77b\") " pod="openshift-must-gather-bsr5x/must-gather-nclqp" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.595316 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwprk\" (UniqueName: \"kubernetes.io/projected/f15bc032-b4e0-46f9-bbac-c36f6776d77b-kube-api-access-mwprk\") pod \"must-gather-nclqp\" (UID: \"f15bc032-b4e0-46f9-bbac-c36f6776d77b\") " pod="openshift-must-gather-bsr5x/must-gather-nclqp" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.707820 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bsr5x/must-gather-nclqp" Jan 02 00:20:30 crc kubenswrapper[4766]: I0102 00:20:30.920064 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-bsr5x/must-gather-nclqp"] Jan 02 00:20:31 crc kubenswrapper[4766]: I0102 00:20:31.079821 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bsr5x/must-gather-nclqp" event={"ID":"f15bc032-b4e0-46f9-bbac-c36f6776d77b","Type":"ContainerStarted","Data":"e317c86595d63e6ce731b3bc662afd77f87ce04f5800616884004b4383294772"} Jan 02 00:20:42 crc kubenswrapper[4766]: I0102 00:20:42.169111 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bsr5x/must-gather-nclqp" event={"ID":"f15bc032-b4e0-46f9-bbac-c36f6776d77b","Type":"ContainerStarted","Data":"abc83b14aa8fff38a8d1e130a680a99d719773c5803abcf93165d33ffd678c4e"} Jan 02 00:20:42 crc kubenswrapper[4766]: I0102 00:20:42.169641 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bsr5x/must-gather-nclqp" event={"ID":"f15bc032-b4e0-46f9-bbac-c36f6776d77b","Type":"ContainerStarted","Data":"c111172d910b04563febd761f3908e27e594e2744b0cf25b7c2f637bda396c2d"} Jan 02 00:20:42 crc kubenswrapper[4766]: I0102 00:20:42.182074 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-bsr5x/must-gather-nclqp" podStartSLOduration=1.405039644 podStartE2EDuration="12.182064347s" podCreationTimestamp="2026-01-02 00:20:30 +0000 UTC" firstStartedPulling="2026-01-02 00:20:30.924334567 +0000 UTC m=+805.722821721" lastFinishedPulling="2026-01-02 00:20:41.70135927 +0000 UTC m=+816.499846424" observedRunningTime="2026-01-02 00:20:42.180590807 +0000 UTC m=+816.979077961" watchObservedRunningTime="2026-01-02 00:20:42.182064347 +0000 UTC m=+816.980551501" Jan 02 00:21:23 crc kubenswrapper[4766]: I0102 00:21:23.412239 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-2srpw_5901ea26-5679-4e06-b297-4fcbecbdd0ba/control-plane-machine-set-operator/0.log" Jan 02 00:21:23 crc kubenswrapper[4766]: I0102 00:21:23.539853 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-j4kpl_b2cfe289-2a25-4afa-829a-d44c8cd33fb3/kube-rbac-proxy/0.log" Jan 02 00:21:23 crc kubenswrapper[4766]: I0102 00:21:23.616112 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-j4kpl_b2cfe289-2a25-4afa-829a-d44c8cd33fb3/machine-api-operator/0.log" Jan 02 00:21:36 crc kubenswrapper[4766]: I0102 00:21:36.425074 4766 log.go:25] "Finished parsing log file" 
path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-2lsdf_8384b8ff-68e8-469b-8090-f5b763331396/cert-manager-controller/0.log" Jan 02 00:21:36 crc kubenswrapper[4766]: I0102 00:21:36.549370 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-59nnv_16413439-0e97-4c36-996e-5b24f45244c3/cert-manager-cainjector/0.log" Jan 02 00:21:36 crc kubenswrapper[4766]: I0102 00:21:36.606734 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-vcn6h_c35a05ea-0e51-4237-a1a2-d3963f7a9512/cert-manager-webhook/0.log" Jan 02 00:21:56 crc kubenswrapper[4766]: I0102 00:21:56.050240 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4_0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b/util/0.log" Jan 02 00:21:56 crc kubenswrapper[4766]: I0102 00:21:56.245261 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4_0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b/util/0.log" Jan 02 00:21:56 crc kubenswrapper[4766]: I0102 00:21:56.299121 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4_0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b/pull/0.log" Jan 02 00:21:56 crc kubenswrapper[4766]: I0102 00:21:56.303280 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4_0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b/pull/0.log" Jan 02 00:21:56 crc kubenswrapper[4766]: I0102 00:21:56.486248 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4_0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b/util/0.log" Jan 02 00:21:56 crc kubenswrapper[4766]: I0102 00:21:56.492869 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4_0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b/extract/0.log" Jan 02 00:21:56 crc kubenswrapper[4766]: I0102 00:21:56.505110 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931azk8d4_0d1bc21f-6bed-4ed8-8148-ac8f4c7fe28b/pull/0.log" Jan 02 00:21:56 crc kubenswrapper[4766]: I0102 00:21:56.639804 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6_85f7f29a-3282-41e0-bc15-b4e3e54f27c0/util/0.log" Jan 02 00:21:56 crc kubenswrapper[4766]: I0102 00:21:56.808966 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6_85f7f29a-3282-41e0-bc15-b4e3e54f27c0/util/0.log" Jan 02 00:21:56 crc kubenswrapper[4766]: I0102 00:21:56.839954 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6_85f7f29a-3282-41e0-bc15-b4e3e54f27c0/pull/0.log" Jan 02 00:21:56 crc kubenswrapper[4766]: I0102 00:21:56.872568 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6_85f7f29a-3282-41e0-bc15-b4e3e54f27c0/pull/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 
00:21:57.003891 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6_85f7f29a-3282-41e0-bc15-b4e3e54f27c0/pull/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.035477 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6_85f7f29a-3282-41e0-bc15-b4e3e54f27c0/extract/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.040383 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fx79k6_85f7f29a-3282-41e0-bc15-b4e3e54f27c0/util/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.164880 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq_206c274e-ae59-471d-b0ca-5f6f34b28706/util/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.327697 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq_206c274e-ae59-471d-b0ca-5f6f34b28706/pull/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.378384 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq_206c274e-ae59-471d-b0ca-5f6f34b28706/util/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.388296 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq_206c274e-ae59-471d-b0ca-5f6f34b28706/pull/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.554916 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq_206c274e-ae59-471d-b0ca-5f6f34b28706/util/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.570794 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq_206c274e-ae59-471d-b0ca-5f6f34b28706/pull/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.624078 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e6zpkq_206c274e-ae59-471d-b0ca-5f6f34b28706/extract/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.743340 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f_1270eea2-c753-4f07-b4bb-b592f4d04827/util/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.940575 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f_1270eea2-c753-4f07-b4bb-b592f4d04827/util/0.log" Jan 02 00:21:57 crc kubenswrapper[4766]: I0102 00:21:57.958749 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f_1270eea2-c753-4f07-b4bb-b592f4d04827/pull/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.037141 4766 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f_1270eea2-c753-4f07-b4bb-b592f4d04827/pull/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.122888 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f_1270eea2-c753-4f07-b4bb-b592f4d04827/util/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.132172 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f_1270eea2-c753-4f07-b4bb-b592f4d04827/pull/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.139673 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bn48f_1270eea2-c753-4f07-b4bb-b592f4d04827/extract/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.283296 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djhdl_30d047be-f175-4f90-b77a-34a24fea709d/extract-utilities/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.569460 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djhdl_30d047be-f175-4f90-b77a-34a24fea709d/extract-content/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.596812 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djhdl_30d047be-f175-4f90-b77a-34a24fea709d/extract-utilities/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.613118 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djhdl_30d047be-f175-4f90-b77a-34a24fea709d/extract-content/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.751763 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djhdl_30d047be-f175-4f90-b77a-34a24fea709d/extract-utilities/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.758708 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djhdl_30d047be-f175-4f90-b77a-34a24fea709d/extract-content/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.872877 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djhdl_30d047be-f175-4f90-b77a-34a24fea709d/registry-server/0.log" Jan 02 00:21:58 crc kubenswrapper[4766]: I0102 00:21:58.959136 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vwvkb_af16157a-9cb1-4df3-a0c1-66839e592859/extract-utilities/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.138135 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vwvkb_af16157a-9cb1-4df3-a0c1-66839e592859/extract-utilities/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.150284 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vwvkb_af16157a-9cb1-4df3-a0c1-66839e592859/extract-content/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.156872 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vwvkb_af16157a-9cb1-4df3-a0c1-66839e592859/extract-content/0.log" Jan 02 00:21:59 
crc kubenswrapper[4766]: I0102 00:21:59.299916 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vwvkb_af16157a-9cb1-4df3-a0c1-66839e592859/extract-content/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.307268 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vwvkb_af16157a-9cb1-4df3-a0c1-66839e592859/extract-utilities/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.418992 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vwvkb_af16157a-9cb1-4df3-a0c1-66839e592859/registry-server/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.485039 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-g8fpt_82bbd02f-f842-47cf-bea4-2dcd3e78a02e/marketplace-operator/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.592053 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5tjl2_0d021b9f-72be-49d6-9fc8-5cc94e6e33ca/extract-utilities/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.718048 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5tjl2_0d021b9f-72be-49d6-9fc8-5cc94e6e33ca/extract-content/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.723024 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5tjl2_0d021b9f-72be-49d6-9fc8-5cc94e6e33ca/extract-utilities/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.760790 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5tjl2_0d021b9f-72be-49d6-9fc8-5cc94e6e33ca/extract-content/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.882547 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5tjl2_0d021b9f-72be-49d6-9fc8-5cc94e6e33ca/extract-utilities/0.log" Jan 02 00:21:59 crc kubenswrapper[4766]: I0102 00:21:59.900251 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5tjl2_0d021b9f-72be-49d6-9fc8-5cc94e6e33ca/extract-content/0.log" Jan 02 00:22:00 crc kubenswrapper[4766]: I0102 00:22:00.001065 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5tjl2_0d021b9f-72be-49d6-9fc8-5cc94e6e33ca/registry-server/0.log" Jan 02 00:22:11 crc kubenswrapper[4766]: I0102 00:22:11.703979 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-t8dpk_0bae17f8-e0b5-4658-a923-279617d6d958/prometheus-operator/0.log" Jan 02 00:22:11 crc kubenswrapper[4766]: I0102 00:22:11.927347 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-74fb98679c-8hjfc_9847ef6e-30b8-452a-9b4f-53cb6aabe7ca/prometheus-operator-admission-webhook/0.log" Jan 02 00:22:11 crc kubenswrapper[4766]: I0102 00:22:11.973938 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-74fb98679c-l98hh_3d8010c4-a7db-48af-8230-720463cc00a3/prometheus-operator-admission-webhook/0.log" Jan 02 00:22:12 crc kubenswrapper[4766]: I0102 00:22:12.143574 4766 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-clrx5_c2cc883f-2ba7-499a-b5cc-b84de8b742ba/operator/0.log" Jan 02 00:22:12 crc kubenswrapper[4766]: I0102 00:22:12.169862 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-qptw2_d0926af8-bde6-483d-b35a-06b9f1e39558/perses-operator/0.log" Jan 02 00:22:22 crc kubenswrapper[4766]: I0102 00:22:22.942904 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4pt99"] Jan 02 00:22:22 crc kubenswrapper[4766]: I0102 00:22:22.945180 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:22 crc kubenswrapper[4766]: I0102 00:22:22.973495 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4pt99"] Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.081916 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv4c4\" (UniqueName: \"kubernetes.io/projected/61c2ec8c-8a95-4916-89b7-ccee7388af5b-kube-api-access-pv4c4\") pod \"certified-operators-4pt99\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.082186 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-catalog-content\") pod \"certified-operators-4pt99\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.082379 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-utilities\") pod \"certified-operators-4pt99\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.184208 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv4c4\" (UniqueName: \"kubernetes.io/projected/61c2ec8c-8a95-4916-89b7-ccee7388af5b-kube-api-access-pv4c4\") pod \"certified-operators-4pt99\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.184276 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-catalog-content\") pod \"certified-operators-4pt99\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.184310 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-utilities\") pod \"certified-operators-4pt99\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.184752 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-utilities\") pod \"certified-operators-4pt99\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.184857 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-catalog-content\") pod \"certified-operators-4pt99\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.209461 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv4c4\" (UniqueName: \"kubernetes.io/projected/61c2ec8c-8a95-4916-89b7-ccee7388af5b-kube-api-access-pv4c4\") pod \"certified-operators-4pt99\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.270301 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.580041 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4pt99"] Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.873652 4766 generic.go:334] "Generic (PLEG): container finished" podID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" containerID="10d9e362002a870acdeb632b2e2a9ef393696e5c94a165eac6b90223a056f87d" exitCode=0 Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.873705 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pt99" event={"ID":"61c2ec8c-8a95-4916-89b7-ccee7388af5b","Type":"ContainerDied","Data":"10d9e362002a870acdeb632b2e2a9ef393696e5c94a165eac6b90223a056f87d"} Jan 02 00:22:23 crc kubenswrapper[4766]: I0102 00:22:23.873731 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pt99" event={"ID":"61c2ec8c-8a95-4916-89b7-ccee7388af5b","Type":"ContainerStarted","Data":"9e2e7e39b8ca2fdbdfed8d5072eb7eac62a4beabc074c7e5dd2179b28356adc5"} Jan 02 00:22:24 crc kubenswrapper[4766]: I0102 00:22:24.883463 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pt99" event={"ID":"61c2ec8c-8a95-4916-89b7-ccee7388af5b","Type":"ContainerStarted","Data":"511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb"} Jan 02 00:22:25 crc kubenswrapper[4766]: I0102 00:22:25.892471 4766 generic.go:334] "Generic (PLEG): container finished" podID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" containerID="511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb" exitCode=0 Jan 02 00:22:25 crc kubenswrapper[4766]: I0102 00:22:25.892526 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pt99" event={"ID":"61c2ec8c-8a95-4916-89b7-ccee7388af5b","Type":"ContainerDied","Data":"511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb"} Jan 02 00:22:26 crc kubenswrapper[4766]: I0102 00:22:26.901700 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pt99" event={"ID":"61c2ec8c-8a95-4916-89b7-ccee7388af5b","Type":"ContainerStarted","Data":"f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11"} Jan 02 00:22:26 crc kubenswrapper[4766]: I0102 
00:22:26.925951 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4pt99" podStartSLOduration=2.523621499 podStartE2EDuration="4.925926763s" podCreationTimestamp="2026-01-02 00:22:22 +0000 UTC" firstStartedPulling="2026-01-02 00:22:23.875049848 +0000 UTC m=+918.673537002" lastFinishedPulling="2026-01-02 00:22:26.277355112 +0000 UTC m=+921.075842266" observedRunningTime="2026-01-02 00:22:26.922262073 +0000 UTC m=+921.720749227" watchObservedRunningTime="2026-01-02 00:22:26.925926763 +0000 UTC m=+921.724413917" Jan 02 00:22:33 crc kubenswrapper[4766]: I0102 00:22:33.272095 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:33 crc kubenswrapper[4766]: I0102 00:22:33.272907 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:33 crc kubenswrapper[4766]: I0102 00:22:33.313785 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:34 crc kubenswrapper[4766]: I0102 00:22:34.018072 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:34 crc kubenswrapper[4766]: I0102 00:22:34.108297 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4pt99"] Jan 02 00:22:35 crc kubenswrapper[4766]: I0102 00:22:35.970226 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4pt99" podUID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" containerName="registry-server" containerID="cri-o://f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11" gracePeriod=2 Jan 02 00:22:36 crc kubenswrapper[4766]: I0102 00:22:36.887354 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:36 crc kubenswrapper[4766]: I0102 00:22:36.976303 4766 generic.go:334] "Generic (PLEG): container finished" podID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" containerID="f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11" exitCode=0 Jan 02 00:22:36 crc kubenswrapper[4766]: I0102 00:22:36.976522 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pt99" event={"ID":"61c2ec8c-8a95-4916-89b7-ccee7388af5b","Type":"ContainerDied","Data":"f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11"} Jan 02 00:22:36 crc kubenswrapper[4766]: I0102 00:22:36.976582 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4pt99" Jan 02 00:22:36 crc kubenswrapper[4766]: I0102 00:22:36.976654 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pt99" event={"ID":"61c2ec8c-8a95-4916-89b7-ccee7388af5b","Type":"ContainerDied","Data":"9e2e7e39b8ca2fdbdfed8d5072eb7eac62a4beabc074c7e5dd2179b28356adc5"} Jan 02 00:22:36 crc kubenswrapper[4766]: I0102 00:22:36.976672 4766 scope.go:117] "RemoveContainer" containerID="f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11" Jan 02 00:22:36 crc kubenswrapper[4766]: I0102 00:22:36.995031 4766 scope.go:117] "RemoveContainer" containerID="511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.021320 4766 scope.go:117] "RemoveContainer" containerID="10d9e362002a870acdeb632b2e2a9ef393696e5c94a165eac6b90223a056f87d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.036980 4766 scope.go:117] "RemoveContainer" containerID="f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11" Jan 02 00:22:37 crc kubenswrapper[4766]: E0102 00:22:37.037447 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11\": container with ID starting with f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11 not found: ID does not exist" containerID="f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.037476 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11"} err="failed to get container status \"f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11\": rpc error: code = NotFound desc = could not find container \"f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11\": container with ID starting with f651007596a69569adacb6ed411142773d5ec30044c4928dd5d2d0d1b0d22f11 not found: ID does not exist" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.037496 4766 scope.go:117] "RemoveContainer" containerID="511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb" Jan 02 00:22:37 crc kubenswrapper[4766]: E0102 00:22:37.037713 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb\": container with ID starting with 511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb not found: ID does not exist" containerID="511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.037732 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb"} err="failed to get container status \"511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb\": rpc error: code = NotFound desc = could not find container \"511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb\": container with ID starting with 511e5844d7759caf92e1850528b15503af8c2545679690aa3f57cece048f9ceb not found: ID does not exist" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.037743 4766 scope.go:117] "RemoveContainer" 
containerID="10d9e362002a870acdeb632b2e2a9ef393696e5c94a165eac6b90223a056f87d" Jan 02 00:22:37 crc kubenswrapper[4766]: E0102 00:22:37.038031 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10d9e362002a870acdeb632b2e2a9ef393696e5c94a165eac6b90223a056f87d\": container with ID starting with 10d9e362002a870acdeb632b2e2a9ef393696e5c94a165eac6b90223a056f87d not found: ID does not exist" containerID="10d9e362002a870acdeb632b2e2a9ef393696e5c94a165eac6b90223a056f87d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.038049 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10d9e362002a870acdeb632b2e2a9ef393696e5c94a165eac6b90223a056f87d"} err="failed to get container status \"10d9e362002a870acdeb632b2e2a9ef393696e5c94a165eac6b90223a056f87d\": rpc error: code = NotFound desc = could not find container \"10d9e362002a870acdeb632b2e2a9ef393696e5c94a165eac6b90223a056f87d\": container with ID starting with 10d9e362002a870acdeb632b2e2a9ef393696e5c94a165eac6b90223a056f87d not found: ID does not exist" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.086784 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-catalog-content\") pod \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.086875 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pv4c4\" (UniqueName: \"kubernetes.io/projected/61c2ec8c-8a95-4916-89b7-ccee7388af5b-kube-api-access-pv4c4\") pod \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.087456 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-utilities\") pod \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\" (UID: \"61c2ec8c-8a95-4916-89b7-ccee7388af5b\") " Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.088440 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-utilities" (OuterVolumeSpecName: "utilities") pod "61c2ec8c-8a95-4916-89b7-ccee7388af5b" (UID: "61c2ec8c-8a95-4916-89b7-ccee7388af5b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.099411 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61c2ec8c-8a95-4916-89b7-ccee7388af5b-kube-api-access-pv4c4" (OuterVolumeSpecName: "kube-api-access-pv4c4") pod "61c2ec8c-8a95-4916-89b7-ccee7388af5b" (UID: "61c2ec8c-8a95-4916-89b7-ccee7388af5b"). InnerVolumeSpecName "kube-api-access-pv4c4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.130796 4766 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z2c6d"] Jan 02 00:22:37 crc kubenswrapper[4766]: E0102 00:22:37.133053 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" containerName="extract-content" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.133077 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" containerName="extract-content" Jan 02 00:22:37 crc kubenswrapper[4766]: E0102 00:22:37.133114 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" containerName="registry-server" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.133122 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" containerName="registry-server" Jan 02 00:22:37 crc kubenswrapper[4766]: E0102 00:22:37.133133 4766 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" containerName="extract-utilities" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.133144 4766 state_mem.go:107] "Deleted CPUSet assignment" podUID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" containerName="extract-utilities" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.135791 4766 memory_manager.go:354] "RemoveStaleState removing state" podUID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" containerName="registry-server" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.138808 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.150346 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z2c6d"] Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.164151 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61c2ec8c-8a95-4916-89b7-ccee7388af5b" (UID: "61c2ec8c-8a95-4916-89b7-ccee7388af5b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.188730 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.188779 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61c2ec8c-8a95-4916-89b7-ccee7388af5b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.188817 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pv4c4\" (UniqueName: \"kubernetes.io/projected/61c2ec8c-8a95-4916-89b7-ccee7388af5b-kube-api-access-pv4c4\") on node \"crc\" DevicePath \"\"" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.289494 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-catalog-content\") pod \"redhat-operators-z2c6d\" (UID: \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.289738 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-utilities\") pod \"redhat-operators-z2c6d\" (UID: \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.289919 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqvm2\" (UniqueName: \"kubernetes.io/projected/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-kube-api-access-vqvm2\") pod \"redhat-operators-z2c6d\" (UID: \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.303401 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4pt99"] Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.306801 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4pt99"] Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.391160 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-utilities\") pod \"redhat-operators-z2c6d\" (UID: \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.391228 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqvm2\" (UniqueName: \"kubernetes.io/projected/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-kube-api-access-vqvm2\") pod \"redhat-operators-z2c6d\" (UID: \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.391301 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-catalog-content\") pod \"redhat-operators-z2c6d\" (UID: 
\"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.391717 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-catalog-content\") pod \"redhat-operators-z2c6d\" (UID: \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.391785 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-utilities\") pod \"redhat-operators-z2c6d\" (UID: \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.417077 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqvm2\" (UniqueName: \"kubernetes.io/projected/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-kube-api-access-vqvm2\") pod \"redhat-operators-z2c6d\" (UID: \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.460132 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.555361 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61c2ec8c-8a95-4916-89b7-ccee7388af5b" path="/var/lib/kubelet/pods/61c2ec8c-8a95-4916-89b7-ccee7388af5b/volumes" Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.898746 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z2c6d"] Jan 02 00:22:37 crc kubenswrapper[4766]: I0102 00:22:37.985078 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z2c6d" event={"ID":"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3","Type":"ContainerStarted","Data":"a399d9d2ebeb94996949d1d49b0a595d2e905a3255fba1bf9e5940f36a41e7b4"} Jan 02 00:22:38 crc kubenswrapper[4766]: I0102 00:22:38.996987 4766 generic.go:334] "Generic (PLEG): container finished" podID="6e8610d7-e17b-47e2-bf60-ad1d649a0cd3" containerID="5f07c76a5f45c45b85394c960a1adcc86d5647de513ed64bee8819ad0f828680" exitCode=0 Jan 02 00:22:38 crc kubenswrapper[4766]: I0102 00:22:38.997055 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z2c6d" event={"ID":"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3","Type":"ContainerDied","Data":"5f07c76a5f45c45b85394c960a1adcc86d5647de513ed64bee8819ad0f828680"} Jan 02 00:22:38 crc kubenswrapper[4766]: I0102 00:22:38.999588 4766 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.010062 4766 generic.go:334] "Generic (PLEG): container finished" podID="6e8610d7-e17b-47e2-bf60-ad1d649a0cd3" containerID="f7dee3190213d1e9581e21f5c3fa44f36a218303ebf9abfebba264eb15c929cc" exitCode=0 Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.010119 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z2c6d" event={"ID":"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3","Type":"ContainerDied","Data":"f7dee3190213d1e9581e21f5c3fa44f36a218303ebf9abfebba264eb15c929cc"} Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.339530 4766 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-48gfh"] Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.400005 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.421621 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-48gfh"] Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.503906 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd2p4\" (UniqueName: \"kubernetes.io/projected/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-kube-api-access-bd2p4\") pod \"community-operators-48gfh\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.503976 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-utilities\") pod \"community-operators-48gfh\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.504008 4766 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-catalog-content\") pod \"community-operators-48gfh\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.605028 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-catalog-content\") pod \"community-operators-48gfh\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.605141 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd2p4\" (UniqueName: \"kubernetes.io/projected/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-kube-api-access-bd2p4\") pod \"community-operators-48gfh\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.605168 4766 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-utilities\") pod \"community-operators-48gfh\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.605817 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-catalog-content\") pod \"community-operators-48gfh\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.605967 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-utilities\") pod 
\"community-operators-48gfh\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.647766 4766 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd2p4\" (UniqueName: \"kubernetes.io/projected/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-kube-api-access-bd2p4\") pod \"community-operators-48gfh\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:41 crc kubenswrapper[4766]: I0102 00:22:41.737242 4766 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:42 crc kubenswrapper[4766]: I0102 00:22:42.059298 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z2c6d" event={"ID":"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3","Type":"ContainerStarted","Data":"29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6"} Jan 02 00:22:42 crc kubenswrapper[4766]: I0102 00:22:42.102924 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z2c6d" podStartSLOduration=2.507010785 podStartE2EDuration="5.102907945s" podCreationTimestamp="2026-01-02 00:22:37 +0000 UTC" firstStartedPulling="2026-01-02 00:22:38.999277734 +0000 UTC m=+933.797764908" lastFinishedPulling="2026-01-02 00:22:41.595174914 +0000 UTC m=+936.393662068" observedRunningTime="2026-01-02 00:22:42.101324022 +0000 UTC m=+936.899811176" watchObservedRunningTime="2026-01-02 00:22:42.102907945 +0000 UTC m=+936.901395099" Jan 02 00:22:42 crc kubenswrapper[4766]: I0102 00:22:42.278584 4766 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-48gfh"] Jan 02 00:22:42 crc kubenswrapper[4766]: W0102 00:22:42.294497 4766 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff9ce5f5_d6d6_4f9f_a284_04bc38cac4a7.slice/crio-7cf254a7a1c85abba8525f1f6875a4e6aaacf445072f61ef4ca6d68c127a98c6 WatchSource:0}: Error finding container 7cf254a7a1c85abba8525f1f6875a4e6aaacf445072f61ef4ca6d68c127a98c6: Status 404 returned error can't find the container with id 7cf254a7a1c85abba8525f1f6875a4e6aaacf445072f61ef4ca6d68c127a98c6 Jan 02 00:22:43 crc kubenswrapper[4766]: I0102 00:22:43.066267 4766 generic.go:334] "Generic (PLEG): container finished" podID="ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7" containerID="89c124cc5d09bf2b26bba4fa6f4efd7116b5b7ba9d46e533a7ed24b64a5ceb64" exitCode=0 Jan 02 00:22:43 crc kubenswrapper[4766]: I0102 00:22:43.066309 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-48gfh" event={"ID":"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7","Type":"ContainerDied","Data":"89c124cc5d09bf2b26bba4fa6f4efd7116b5b7ba9d46e533a7ed24b64a5ceb64"} Jan 02 00:22:43 crc kubenswrapper[4766]: I0102 00:22:43.066649 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-48gfh" event={"ID":"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7","Type":"ContainerStarted","Data":"7cf254a7a1c85abba8525f1f6875a4e6aaacf445072f61ef4ca6d68c127a98c6"} Jan 02 00:22:44 crc kubenswrapper[4766]: I0102 00:22:44.073885 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-48gfh" 
event={"ID":"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7","Type":"ContainerStarted","Data":"dc1f65e50f49c160c83ef18b1c9693a410460b1ca83f553b4c647396dfcfb2fa"} Jan 02 00:22:45 crc kubenswrapper[4766]: I0102 00:22:45.081115 4766 generic.go:334] "Generic (PLEG): container finished" podID="ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7" containerID="dc1f65e50f49c160c83ef18b1c9693a410460b1ca83f553b4c647396dfcfb2fa" exitCode=0 Jan 02 00:22:45 crc kubenswrapper[4766]: I0102 00:22:45.081161 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-48gfh" event={"ID":"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7","Type":"ContainerDied","Data":"dc1f65e50f49c160c83ef18b1c9693a410460b1ca83f553b4c647396dfcfb2fa"} Jan 02 00:22:47 crc kubenswrapper[4766]: I0102 00:22:47.095706 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-48gfh" event={"ID":"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7","Type":"ContainerStarted","Data":"8f627728c5a619b06231a0b1e9bc548c64ec9a857fe622601248c0d81fed76aa"} Jan 02 00:22:47 crc kubenswrapper[4766]: I0102 00:22:47.116017 4766 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-48gfh" podStartSLOduration=3.151801297 podStartE2EDuration="6.115997154s" podCreationTimestamp="2026-01-02 00:22:41 +0000 UTC" firstStartedPulling="2026-01-02 00:22:43.068204082 +0000 UTC m=+937.866691236" lastFinishedPulling="2026-01-02 00:22:46.032399929 +0000 UTC m=+940.830887093" observedRunningTime="2026-01-02 00:22:47.114494393 +0000 UTC m=+941.912981547" watchObservedRunningTime="2026-01-02 00:22:47.115997154 +0000 UTC m=+941.914484318" Jan 02 00:22:47 crc kubenswrapper[4766]: I0102 00:22:47.461144 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:47 crc kubenswrapper[4766]: I0102 00:22:47.461520 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:48 crc kubenswrapper[4766]: I0102 00:22:48.520323 4766 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-z2c6d" podUID="6e8610d7-e17b-47e2-bf60-ad1d649a0cd3" containerName="registry-server" probeResult="failure" output=< Jan 02 00:22:48 crc kubenswrapper[4766]: timeout: failed to connect service ":50051" within 1s Jan 02 00:22:48 crc kubenswrapper[4766]: > Jan 02 00:22:51 crc kubenswrapper[4766]: I0102 00:22:51.737723 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:51 crc kubenswrapper[4766]: I0102 00:22:51.739855 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:51 crc kubenswrapper[4766]: I0102 00:22:51.778526 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:52 crc kubenswrapper[4766]: I0102 00:22:52.172027 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:52 crc kubenswrapper[4766]: I0102 00:22:52.225969 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-48gfh"] Jan 02 00:22:54 crc kubenswrapper[4766]: I0102 00:22:54.142632 4766 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-48gfh" podUID="ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7" containerName="registry-server" containerID="cri-o://8f627728c5a619b06231a0b1e9bc548c64ec9a857fe622601248c0d81fed76aa" gracePeriod=2 Jan 02 00:22:54 crc kubenswrapper[4766]: I0102 00:22:54.181073 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:22:54 crc kubenswrapper[4766]: I0102 00:22:54.181144 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.351244 4766 generic.go:334] "Generic (PLEG): container finished" podID="ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7" containerID="8f627728c5a619b06231a0b1e9bc548c64ec9a857fe622601248c0d81fed76aa" exitCode=0 Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.351557 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-48gfh" event={"ID":"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7","Type":"ContainerDied","Data":"8f627728c5a619b06231a0b1e9bc548c64ec9a857fe622601248c0d81fed76aa"} Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.579824 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.726272 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bd2p4\" (UniqueName: \"kubernetes.io/projected/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-kube-api-access-bd2p4\") pod \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.726349 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-utilities\") pod \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.726376 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-catalog-content\") pod \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\" (UID: \"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7\") " Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.727378 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-utilities" (OuterVolumeSpecName: "utilities") pod "ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7" (UID: "ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.739219 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-kube-api-access-bd2p4" (OuterVolumeSpecName: "kube-api-access-bd2p4") pod "ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7" (UID: "ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7"). InnerVolumeSpecName "kube-api-access-bd2p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.806895 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7" (UID: "ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.827471 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bd2p4\" (UniqueName: \"kubernetes.io/projected/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-kube-api-access-bd2p4\") on node \"crc\" DevicePath \"\"" Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.827514 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:22:56 crc kubenswrapper[4766]: I0102 00:22:56.827527 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:22:57 crc kubenswrapper[4766]: I0102 00:22:57.362396 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-48gfh" event={"ID":"ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7","Type":"ContainerDied","Data":"7cf254a7a1c85abba8525f1f6875a4e6aaacf445072f61ef4ca6d68c127a98c6"} Jan 02 00:22:57 crc kubenswrapper[4766]: I0102 00:22:57.362474 4766 scope.go:117] "RemoveContainer" containerID="8f627728c5a619b06231a0b1e9bc548c64ec9a857fe622601248c0d81fed76aa" Jan 02 00:22:57 crc kubenswrapper[4766]: I0102 00:22:57.362502 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-48gfh" Jan 02 00:22:57 crc kubenswrapper[4766]: I0102 00:22:57.383413 4766 scope.go:117] "RemoveContainer" containerID="dc1f65e50f49c160c83ef18b1c9693a410460b1ca83f553b4c647396dfcfb2fa" Jan 02 00:22:57 crc kubenswrapper[4766]: I0102 00:22:57.419750 4766 scope.go:117] "RemoveContainer" containerID="89c124cc5d09bf2b26bba4fa6f4efd7116b5b7ba9d46e533a7ed24b64a5ceb64" Jan 02 00:22:57 crc kubenswrapper[4766]: I0102 00:22:57.421776 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-48gfh"] Jan 02 00:22:57 crc kubenswrapper[4766]: I0102 00:22:57.426894 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-48gfh"] Jan 02 00:22:57 crc kubenswrapper[4766]: I0102 00:22:57.514316 4766 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:57 crc kubenswrapper[4766]: I0102 00:22:57.546137 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7" path="/var/lib/kubelet/pods/ff9ce5f5-d6d6-4f9f-a284-04bc38cac4a7/volumes" Jan 02 00:22:57 crc kubenswrapper[4766]: I0102 00:22:57.560319 4766 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:58 crc kubenswrapper[4766]: I0102 00:22:58.810598 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z2c6d"] Jan 02 00:22:59 crc kubenswrapper[4766]: I0102 00:22:59.375070 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z2c6d" podUID="6e8610d7-e17b-47e2-bf60-ad1d649a0cd3" containerName="registry-server" containerID="cri-o://29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6" gracePeriod=2 Jan 02 00:22:59 crc kubenswrapper[4766]: I0102 00:22:59.753359 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:22:59 crc kubenswrapper[4766]: I0102 00:22:59.867036 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-catalog-content\") pod \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\" (UID: \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " Jan 02 00:22:59 crc kubenswrapper[4766]: I0102 00:22:59.876179 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqvm2\" (UniqueName: \"kubernetes.io/projected/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-kube-api-access-vqvm2\") pod \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\" (UID: \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " Jan 02 00:22:59 crc kubenswrapper[4766]: I0102 00:22:59.876318 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-utilities\") pod \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\" (UID: \"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3\") " Jan 02 00:22:59 crc kubenswrapper[4766]: I0102 00:22:59.882389 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-utilities" (OuterVolumeSpecName: "utilities") pod "6e8610d7-e17b-47e2-bf60-ad1d649a0cd3" (UID: "6e8610d7-e17b-47e2-bf60-ad1d649a0cd3"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:22:59 crc kubenswrapper[4766]: I0102 00:22:59.883826 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-kube-api-access-vqvm2" (OuterVolumeSpecName: "kube-api-access-vqvm2") pod "6e8610d7-e17b-47e2-bf60-ad1d649a0cd3" (UID: "6e8610d7-e17b-47e2-bf60-ad1d649a0cd3"). InnerVolumeSpecName "kube-api-access-vqvm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:22:59 crc kubenswrapper[4766]: I0102 00:22:59.978446 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqvm2\" (UniqueName: \"kubernetes.io/projected/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-kube-api-access-vqvm2\") on node \"crc\" DevicePath \"\"" Jan 02 00:22:59 crc kubenswrapper[4766]: I0102 00:22:59.978502 4766 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-utilities\") on node \"crc\" DevicePath \"\"" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.012363 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6e8610d7-e17b-47e2-bf60-ad1d649a0cd3" (UID: "6e8610d7-e17b-47e2-bf60-ad1d649a0cd3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.079515 4766 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.388239 4766 generic.go:334] "Generic (PLEG): container finished" podID="6e8610d7-e17b-47e2-bf60-ad1d649a0cd3" containerID="29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6" exitCode=0 Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.388303 4766 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z2c6d" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.388345 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z2c6d" event={"ID":"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3","Type":"ContainerDied","Data":"29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6"} Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.389261 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z2c6d" event={"ID":"6e8610d7-e17b-47e2-bf60-ad1d649a0cd3","Type":"ContainerDied","Data":"a399d9d2ebeb94996949d1d49b0a595d2e905a3255fba1bf9e5940f36a41e7b4"} Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.389310 4766 scope.go:117] "RemoveContainer" containerID="29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.440014 4766 scope.go:117] "RemoveContainer" containerID="f7dee3190213d1e9581e21f5c3fa44f36a218303ebf9abfebba264eb15c929cc" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.445618 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z2c6d"] Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.454669 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z2c6d"] Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.474257 4766 scope.go:117] "RemoveContainer" containerID="5f07c76a5f45c45b85394c960a1adcc86d5647de513ed64bee8819ad0f828680" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.502453 4766 scope.go:117] "RemoveContainer" containerID="29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6" Jan 02 00:23:00 crc kubenswrapper[4766]: E0102 00:23:00.503381 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6\": container with ID starting with 29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6 not found: ID does not exist" containerID="29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.503509 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6"} err="failed to get container status \"29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6\": rpc error: code = NotFound desc = could not find container \"29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6\": container with ID starting with 29e522a1077b6e237b7b8d1e1da7e68166e4c9442cd752948bd71cfcbb502db6 not found: ID does not exist" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.503702 4766 scope.go:117] "RemoveContainer" containerID="f7dee3190213d1e9581e21f5c3fa44f36a218303ebf9abfebba264eb15c929cc" Jan 02 00:23:00 crc kubenswrapper[4766]: E0102 00:23:00.505958 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7dee3190213d1e9581e21f5c3fa44f36a218303ebf9abfebba264eb15c929cc\": container with ID starting with f7dee3190213d1e9581e21f5c3fa44f36a218303ebf9abfebba264eb15c929cc not found: ID does not exist" containerID="f7dee3190213d1e9581e21f5c3fa44f36a218303ebf9abfebba264eb15c929cc" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.506028 4766 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7dee3190213d1e9581e21f5c3fa44f36a218303ebf9abfebba264eb15c929cc"} err="failed to get container status \"f7dee3190213d1e9581e21f5c3fa44f36a218303ebf9abfebba264eb15c929cc\": rpc error: code = NotFound desc = could not find container \"f7dee3190213d1e9581e21f5c3fa44f36a218303ebf9abfebba264eb15c929cc\": container with ID starting with f7dee3190213d1e9581e21f5c3fa44f36a218303ebf9abfebba264eb15c929cc not found: ID does not exist" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.506078 4766 scope.go:117] "RemoveContainer" containerID="5f07c76a5f45c45b85394c960a1adcc86d5647de513ed64bee8819ad0f828680" Jan 02 00:23:00 crc kubenswrapper[4766]: E0102 00:23:00.506822 4766 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f07c76a5f45c45b85394c960a1adcc86d5647de513ed64bee8819ad0f828680\": container with ID starting with 5f07c76a5f45c45b85394c960a1adcc86d5647de513ed64bee8819ad0f828680 not found: ID does not exist" containerID="5f07c76a5f45c45b85394c960a1adcc86d5647de513ed64bee8819ad0f828680" Jan 02 00:23:00 crc kubenswrapper[4766]: I0102 00:23:00.507070 4766 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f07c76a5f45c45b85394c960a1adcc86d5647de513ed64bee8819ad0f828680"} err="failed to get container status \"5f07c76a5f45c45b85394c960a1adcc86d5647de513ed64bee8819ad0f828680\": rpc error: code = NotFound desc = could not find container \"5f07c76a5f45c45b85394c960a1adcc86d5647de513ed64bee8819ad0f828680\": container with ID starting with 5f07c76a5f45c45b85394c960a1adcc86d5647de513ed64bee8819ad0f828680 not found: ID does not exist" Jan 02 00:23:01 crc kubenswrapper[4766]: I0102 00:23:01.541925 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e8610d7-e17b-47e2-bf60-ad1d649a0cd3" path="/var/lib/kubelet/pods/6e8610d7-e17b-47e2-bf60-ad1d649a0cd3/volumes" Jan 02 00:23:06 crc kubenswrapper[4766]: I0102 00:23:06.446017 4766 generic.go:334] "Generic (PLEG): container finished" podID="f15bc032-b4e0-46f9-bbac-c36f6776d77b" containerID="c111172d910b04563febd761f3908e27e594e2744b0cf25b7c2f637bda396c2d" exitCode=0 Jan 02 00:23:06 crc kubenswrapper[4766]: I0102 00:23:06.446155 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bsr5x/must-gather-nclqp" event={"ID":"f15bc032-b4e0-46f9-bbac-c36f6776d77b","Type":"ContainerDied","Data":"c111172d910b04563febd761f3908e27e594e2744b0cf25b7c2f637bda396c2d"} Jan 02 00:23:06 crc kubenswrapper[4766]: I0102 00:23:06.447040 4766 scope.go:117] "RemoveContainer" containerID="c111172d910b04563febd761f3908e27e594e2744b0cf25b7c2f637bda396c2d" Jan 02 00:23:06 crc kubenswrapper[4766]: I0102 00:23:06.664319 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bsr5x_must-gather-nclqp_f15bc032-b4e0-46f9-bbac-c36f6776d77b/gather/0.log" Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.211488 4766 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-bsr5x/must-gather-nclqp"] Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.212272 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-bsr5x/must-gather-nclqp" podUID="f15bc032-b4e0-46f9-bbac-c36f6776d77b" containerName="copy" containerID="cri-o://abc83b14aa8fff38a8d1e130a680a99d719773c5803abcf93165d33ffd678c4e" gracePeriod=2 Jan 02 00:23:13 crc 
kubenswrapper[4766]: I0102 00:23:13.223680 4766 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-bsr5x/must-gather-nclqp"] Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.507005 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bsr5x_must-gather-nclqp_f15bc032-b4e0-46f9-bbac-c36f6776d77b/copy/0.log" Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.507681 4766 generic.go:334] "Generic (PLEG): container finished" podID="f15bc032-b4e0-46f9-bbac-c36f6776d77b" containerID="abc83b14aa8fff38a8d1e130a680a99d719773c5803abcf93165d33ffd678c4e" exitCode=143 Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.551543 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bsr5x_must-gather-nclqp_f15bc032-b4e0-46f9-bbac-c36f6776d77b/copy/0.log" Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.552110 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bsr5x/must-gather-nclqp" Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.677908 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f15bc032-b4e0-46f9-bbac-c36f6776d77b-must-gather-output\") pod \"f15bc032-b4e0-46f9-bbac-c36f6776d77b\" (UID: \"f15bc032-b4e0-46f9-bbac-c36f6776d77b\") " Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.678063 4766 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwprk\" (UniqueName: \"kubernetes.io/projected/f15bc032-b4e0-46f9-bbac-c36f6776d77b-kube-api-access-mwprk\") pod \"f15bc032-b4e0-46f9-bbac-c36f6776d77b\" (UID: \"f15bc032-b4e0-46f9-bbac-c36f6776d77b\") " Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.686126 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f15bc032-b4e0-46f9-bbac-c36f6776d77b-kube-api-access-mwprk" (OuterVolumeSpecName: "kube-api-access-mwprk") pod "f15bc032-b4e0-46f9-bbac-c36f6776d77b" (UID: "f15bc032-b4e0-46f9-bbac-c36f6776d77b"). InnerVolumeSpecName "kube-api-access-mwprk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.741935 4766 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f15bc032-b4e0-46f9-bbac-c36f6776d77b-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "f15bc032-b4e0-46f9-bbac-c36f6776d77b" (UID: "f15bc032-b4e0-46f9-bbac-c36f6776d77b"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.779397 4766 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwprk\" (UniqueName: \"kubernetes.io/projected/f15bc032-b4e0-46f9-bbac-c36f6776d77b-kube-api-access-mwprk\") on node \"crc\" DevicePath \"\"" Jan 02 00:23:13 crc kubenswrapper[4766]: I0102 00:23:13.779440 4766 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f15bc032-b4e0-46f9-bbac-c36f6776d77b-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 02 00:23:14 crc kubenswrapper[4766]: I0102 00:23:14.515298 4766 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bsr5x_must-gather-nclqp_f15bc032-b4e0-46f9-bbac-c36f6776d77b/copy/0.log" Jan 02 00:23:14 crc kubenswrapper[4766]: I0102 00:23:14.515745 4766 scope.go:117] "RemoveContainer" containerID="abc83b14aa8fff38a8d1e130a680a99d719773c5803abcf93165d33ffd678c4e" Jan 02 00:23:14 crc kubenswrapper[4766]: I0102 00:23:14.515903 4766 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bsr5x/must-gather-nclqp" Jan 02 00:23:14 crc kubenswrapper[4766]: I0102 00:23:14.536994 4766 scope.go:117] "RemoveContainer" containerID="c111172d910b04563febd761f3908e27e594e2744b0cf25b7c2f637bda396c2d" Jan 02 00:23:15 crc kubenswrapper[4766]: I0102 00:23:15.541648 4766 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f15bc032-b4e0-46f9-bbac-c36f6776d77b" path="/var/lib/kubelet/pods/f15bc032-b4e0-46f9-bbac-c36f6776d77b/volumes" Jan 02 00:23:24 crc kubenswrapper[4766]: I0102 00:23:24.180496 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:23:24 crc kubenswrapper[4766]: I0102 00:23:24.181809 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:23:54 crc kubenswrapper[4766]: I0102 00:23:54.181338 4766 patch_prober.go:28] interesting pod/machine-config-daemon-hlsf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 02 00:23:54 crc kubenswrapper[4766]: I0102 00:23:54.182046 4766 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 02 00:23:54 crc kubenswrapper[4766]: I0102 00:23:54.182105 4766 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" Jan 02 00:23:54 crc kubenswrapper[4766]: I0102 00:23:54.182827 4766 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"f3099eceeb9c369e0ae2f8da79be91ae54a2ea0c86d32b84fd1ed6572982cd8c"} pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 02 00:23:54 crc kubenswrapper[4766]: I0102 00:23:54.182956 4766 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" podUID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerName="machine-config-daemon" containerID="cri-o://f3099eceeb9c369e0ae2f8da79be91ae54a2ea0c86d32b84fd1ed6572982cd8c" gracePeriod=600 Jan 02 00:23:55 crc kubenswrapper[4766]: I0102 00:23:55.033502 4766 generic.go:334] "Generic (PLEG): container finished" podID="a62872f4-c5c6-4f43-ba8a-40e0c96e6968" containerID="f3099eceeb9c369e0ae2f8da79be91ae54a2ea0c86d32b84fd1ed6572982cd8c" exitCode=0 Jan 02 00:23:55 crc kubenswrapper[4766]: I0102 00:23:55.033579 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerDied","Data":"f3099eceeb9c369e0ae2f8da79be91ae54a2ea0c86d32b84fd1ed6572982cd8c"} Jan 02 00:23:55 crc kubenswrapper[4766]: I0102 00:23:55.034058 4766 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hlsf9" event={"ID":"a62872f4-c5c6-4f43-ba8a-40e0c96e6968","Type":"ContainerStarted","Data":"10a1dd0a8d1ee2b7e815d9d7ddad6fbf59c77be2b4687b055d247872c79b528b"} Jan 02 00:23:55 crc kubenswrapper[4766]: I0102 00:23:55.034100 4766 scope.go:117] "RemoveContainer" containerID="690529f4c1451fbb7a4daf40afad9fe675d9ba0699ee41538a56e7145e9d3df2" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515125610173024446 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015125610173017363 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015125605540016510 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015125605540015460 5ustar corecore